## 20100513

### source code for converting b&w images to pure HTML

imagefake.py

   1:

   2:  import sys

   3:  import re

   4:  import os

   5:

   6:  '''

   7:  open html file

   8:  find all image tag

   9:  for each image tag

  10:  remove it

  11:  strip out the image name

  12:  find that image

  13:  convert it to HTML

  14:  replace the image tag with this html

  15:  '''

  16:

  17:  infilename  = sys.argv[1]

  18:  outfilename = sys.argv[2]

  19:  infile  = open( infilename,"r")

  20:  outfile = open(outfilename,"w")

  21:

  22:  document = "".join(infile.readlines())

  23:

  24:  image_tags = re.findall(r'<IMG [^>]*>',document)

  25:

  26:  print image_tags

  27:

  28:  urls = map(lambda s:s.split('SRC=')[1].split('"')[1],image_tags)

  29:

  30:  os.system("javac *.java")

  31:

  32:  for tag in image_tags:

  33:      url = (lambda s:s.split('SRC=')[1].split('"')[1])(tag)

  34:      os.system("rm temp.html.stub; java imagehack %s temp.html"%url)

  35:      print "reading temp.html"

  36:      try:

  37:          f   = open("temp.html.stub",'r')

  38:          obj = "".join(f.readlines())

  39:          f.close()

  40:          document = re.sub(tag,obj,document)

  41:      except IOError:

  42:          pass

  43:

  44:  try:

  45:      f   = open("temp.html.headers",'r')

  46:      headers = "".join(f.readlines())

  47:      f.close()

  48:      document = re.sub(r'</HEAD>',headers,document)

  49:  except IOError:

  50:      pass

  51:

  52:  outfile.write(document)

  53:

  54:

  55:  infile .close()

  56:  outfile.close()

  57:

imagehack.java

   1:  import java.awt.image.BufferedImage;

   2:  import java.io.BufferedOutputStream;

   3:  import java.io.File;

   4:  import java.io.FileOutputStream;

   5:  import java.io.IOException;

   6:  import java.io.PrintWriter;

   7:  import java.util.HashSet;

   8:  import java.util.Set;

   9:  import javax.imageio.ImageIO;

  10:  import java.net.URL;

  11:  import static java.lang.System.*;

  12:  public class imagehack {

  13:      static int depth=8;

  14:      static int D=depth-1;

  15:      static class block {

  16:          public int x,y,w,h,c;

  17:          public block(int a,int b,int f, int d, int e) {x=a;y=b;w=f;h=d;c=e;}

  18:          public boolean equals(block b) {return x==b.x&&y==b.y&&w==b.w&&h==b.h&&c==b.c;}}

  19:      static int foo(int x) { return x/(256/depth); }

  20:      static String tohex(int x) {

  21:          x*=(256/depth);

  22:          return ""+("0123456789ABCDEF".charAt((x>>4)&0xf))+("0123456789ABCDEF".charAt(x&0xf)); }

  23:      public static void main(String[] args) throws IOException {

  24:          String filename = args[0];

  25:          String outputname = args[1];

  26:          out.println("input "+filename);

  27:          out.println("output "+outputname);

  28:          BufferedImage hold;

  29:          try {

  30:              hold = ImageIO.read(new File(filename));

  31:          } catch (Exception e) {

  32:              hold = ImageIO.read(new URL(filename));

  33:          }

  34:          int w = hold.getWidth();

  35:          int h = hold.getHeight();

  36:          Set<block> blocks = new HashSet<block>();

  37:          int[][] data = new int[h][w];

  38:          for (int i=0;i<h;i++) {

  39:              int linestart = -1; int linecolor = D;

  40:              for (int j=0;j<w;j++) {

  41:                  int c=foo(hold.getRGB(j,i)&0xff);

  42:                  if (c!=linecolor) {

  43:                      if (j>0 && linecolor<D)

  44:                          blocks.add(new block(i,linestart,j-linestart,1,linecolor));

  45:                      linecolor=c; linestart=j; } }

  46:              if (linecolor<D)

  47:                  blocks.add(new block(i,linestart,w-linestart,1,linecolor)); }

  48:          out.println("initially "+blocks.size()+" blocks");

  49:          if (blocks.size()>1000) return;

  50:          String wrap =

  51:          "#dumbwrap1 {\n"+

  52:          "  margin:0px;\n"+

  53:          "  padding:0px;\n"+

  54:          "  border:0px none;\n"+

  55:          "  background:#fff;\n"+

  56:          "  position:static;\n"+

  57:          "  display:inline-block;\n"+

  58:          "}";

  59:          String images =

  60:          "#imgwrap2 {\n"+

  61:          "  position:relative;\n"+

  62:          "}";

  63:          File ofile = new File(outputname);

  64:          PrintWriter p = new PrintWriter(new BufferedOutputStream(new FileOutputStream(ofile)));

  65:          p.println("<head>");

  66:          String css = "";

  67:          css += "<style type=\"text/css\">\n";

  68:          for (int i=0; i<D; i++) {

  69:              String c = tohex(i);

  70:              css +=

  71:              "#bb"+c+" {\n"+

  72:              "  margin:0px;\n"+

  73:              "  padding:0px;\n"+

  74:              "  border:0px none;\n"+

  75:              "  background:#"+c+c+c+";\n"+

  76:              "  position:absolute;\n"+

  77:              "}\n"; }

  78:          css += wrap;

  79:          css += images;

  80:          css += "</style>";

  81:          p.println(css);

  82:          p.println("</head>");

  83:          p.println("<body>");

  84:          String imagecode = "";

  85:          imagecode += "<div id='dumbwrap1' style='width:"+w+"px;height:"+h+"px;'>";

  86:          imagecode += "<div id='imgwrap2'>";

  87:          for (block b : blocks)

  88:              imagecode += "<div id='bb"+tohex(b.c)+

  89:              "' style='top:"+b.x+"px;left:"+b.y+"px;width:"+b.w+

  90:              "px;height:"+b.h+"px;'></div>";

  91:          imagecode +="</div></div>";

  92:          p.print(imagecode);

  93:          p.println("</body>");

  94:          p.flush();p.close();

  95:

  96:          //print just the image if external header is used

  97:          ofile = new File(outputname);

  98:          p = new PrintWriter(new BufferedOutputStream(new FileOutputStream(ofile+".stub")));

  99:          p.print(imagecode);

 100:          p.flush();p.close();

 101:

 102:          ofile = new File(outputname);

 103:          p = new PrintWriter(new BufferedOutputStream(new FileOutputStream(ofile+".headers")));

 104:          p.println(css);

 105:          p.flush();p.close();

 106:      }

 107:  }

### Testing Automatic Conversion of Equations to Pure HTML

Looks pretty good to me, though this doesn't play well with browser-resizing of the page.

## 20100512

### Correlating metrics of a spontaneously active neural network undergoing up-down transitions

Page ranking of nodes in directed graphs is correlated with the mean firing rate of a neuron in a spontaneously active network. However, these metrics are not any more correlated with firing rate than a simple sum of all incoming synapse weights. Neither mean rates nor several variations of Page rank were strongly correlated with the time that a unit participated in an upstate. All graph metrics examined were more correlated with average firing rate than they were with sequence position in an upstate.

Metrics investigated

Firing Rate Metrics :

• Mean firing rate ( “rates” ) : Mean firing rate is taken as the average of a unit's firing rate over the course of the simulation.
• Mean log-firing rate ( “log-rates” ) : Mean firing rate is taken as the average of the natural logarithm of a unit's firing rate over the course of the simulation.

Sequence Position in Upstate Metrics :

• Time of first local maximum during an up-state ( “t(max)” ) : This is the time, relative to upstate onset, of the first local maximum in firing rate for each unit.
• Center-of-mass metric with rate~mass and time~space ( “center” ) : This is the normalized sum of the product of firing rate and time since upstate onset for each unit.
• Center-of-mass metric with log-rate~mass and time~space ( “log center” ) : This is the normalized sum of the product of the natural logarithm of firing rate and time since upstate onset for each unit.

Metrics Based on Unit Inputs :

• Sum of all incoming synapse weights ( “all” ) : This is the sum of all incoming synaptic weights for each unit
• Sum of all excitatory synapse weights (“e”) : This is the sum of all incoming excitatory synaptic weights for each unit
• Sum of all inhibitory synapse weights (“i”) : This is the sum of all incoming inhibitory synaptic weights for each unit

Metrics Based on Network Connectivity Graph :

• Page rank using only excitatory synapse edges (“e only”)
• Page rank ( magnitude of a node's component in the principle eigenvector of the graph matrix ) is computed using a directed graph representing only excitatory synapses.
• Page rank using exponential to rectify negative synapses (“exp”)
• Page rank is computed on a directed graph built from the network connectivity matrix, but with edge weights equal to the exponential of the network synapse weight. This is a hack, so to speak. Having negative edge weights often created a graph matrix with no real eigenvalues.
• Page rank using arc-tangent to rectify negative synapses (“atan”)
• Page rank using $1/(1+exp(-x))$ to rectify negative synapses (“nonl”)

Correlations Between all Metrics

|            | rates | log-rates | t(max) | center | log-center | all   | e     | i     | e only | exp   | atan  | nonl  |
|------------|-------|-----------|--------|--------|------------|-------|-------|-------|--------|-------|-------|-------|
| rates      | 1     | 0.41      | -0.36  | 0.68   | -0.68      | 0.56  | 0.45  | -0.55 | 0.45   | 0.55  | 0.56  | 0.56  |
| log-rates  | 0.41  | 1         | -0.66  | 0.47   | -0.51      | 0.70  | 0.59  | -0.66 | 0.60   | 0.70  | 0.70  | 0.70  |
| t(max)     | -0.36 | -0.66     | 1      | -0.58  | 0.70       | -0.26 | -0.21 | 0.26  | -0.21  | -0.26 | -0.27 | -0.27 |
| center     | 0.68  | 0.47      | -0.58  | 1      | -0.77      | 0.27  | 0.19  | -0.28 | 0.19   | 0.26  | 0.27  | 0.27  |
| log-center | -0.68 | -0.51     | 0.70   | -0.77  | 1          | -0.23 | -0.16 | 0.25  | -0.16  | -0.22 | -0.23 | -0.23 |
| all        | 0.56  | 0.70      | -0.26  | 0.27   | -0.23      | 1     | 0.87  | -0.93 | 0.87   | 0.99  | 1.00  | 1.00  |
| e          | 0.45  | 0.59      | -0.21  | 0.19   | -0.16      | 0.87  | 1     | -0.63 | 1.00   | 0.91  | 0.87  | 0.87  |
| i          | -0.55 | -0.66     | 0.26   | -0.28  | 0.25       | -0.93 | -0.63 | 1     | -0.63  | -0.89 | -0.93 | -0.93 |
| e only     | 0.45  | 0.60      | -0.21  | 0.19   | -0.16      | 0.87  | 1.00  | -0.63 | 1      | 0.91  | 0.87  | 0.87  |
| exp        | 0.55  | 0.70      | -0.26  | 0.26   | -0.22      | 0.99  | 0.91  | -0.89 | 0.91   | 1     | 1.00  | 0.99  |
| atan       | 0.56  | 0.70      | -0.27  | 0.27   | -0.23      | 1.00  | 0.87  | -0.93 | 0.87   | 1.00  | 1     | 1.00  |
| nonl       | 0.56  | 0.70      | -0.27  | 0.27   | -0.23      | 1.00  | 0.87  | -0.93 | 0.87   | 0.99  | 1.00  | 1     |

Summary of correlations between metrics :

No metric taken from the network connectivity graph was strongly correlated with any of the sequence position metrics for upstates. However, these graph metrics were correlated with the average log-firing-rate of units. Using Page rank to account for local graph structure doesn't reveal more than looking at the excitatory or inhibitory bias of synapses into a unit, as can be seen by the high correlation between metrics that just look at the inputs of single units and metrics that look at the surrounding graph. This phenomenon might be limited to the particular connectivity graph used in this simulation. Higher mean firing rates were correlated with earlier participation in upstates, as measured by the center-of-mass of firing rate patterns during upstates.

Network properties :

This was a sparse network of 1024 nodes, each with 500 incoming connections. An equal number of inhibitory and excitatory connections were present in the network. There were no distinct inhibitory or excitatory cell types; inhibition was implemented directly as negative synapse weights. Inhibitory synapse weights ranged uniformly in (0,5] and excitatory synapse weights ranged uniformly in [0,3.52). Firing rate dynamics were simulated for firing rate variable U and adaptation variable V as follows :

$f(x)=1/(1+e^{-x})$

$\dot{U}=f(\sum_k {w_k U_k } - \gamma V )$

$\tau \dot{V}=U - b V$

With $\gamma=\tau=0.602,\ b=0.14359$, simulated with $\Delta t = 0.5\,\mathrm{ms}$, and subscript $k$ representing the $k$th incoming synapse to the unit. This network underwent spontaneous transitions between periods of high activity and low activity. Ten seconds of simulation time were recorded for this analysis.

Histogram of mean network firing rate, fit to a mixture of two gaussians. Note the bimodal distribution of rates suggesting up-down state activity. Network states with activity more likely belonging to the high rate gaussian were classified as upstates. A total of 36 upstates were recorded in the simulation run. Upstates were classified by fitting the population average rate distribution to a pair of Gaussians, and assigning states with the higher activity as up-states. States shorter than 50ms were discarded, leaving 31 upstates for analysis.

The distribution of firing rates follows a log-normal distribution.

General note : Don't expect Page rank to be useful on all but the most sparse random graphs.

Intuitively, Page rank becomes more useful as the network connectivity becomes less homogeneous. We are interested in understanding the utility of Page rank on random graphs. Page rank takes into account the excitability of incoming nodes, (in addition to edge weights) when determining the excitability of a given node. The more variance in the Page ranking of nodes, the more useful factoring in the ranks of incoming nodes becomes. A large number of connections will have an averaging effect if drawn randomly from the population, reducing the utility of Page rank. It is possible that, in order for Page ranking to be especially informative, the graph must either be sufficiently sparse such that random incoming connections do not average out to near the mean network excitability, or graph structure needs to be non-random to explicitly add heterogeneity. (I should investigate if a connectivity structure as in Koulakov et al has connectivity substantially different than a random graph, and test how this alters the analysis performed in this report). Note that Page rank seems to be marginally useful to Google, which is operating on a random graph with a power law degree distribution. It seems unlikely that neural systems would have a power law degree distribution.

The sum of the weights on incoming nodes is a first approximation to Page rank. In the iterated matrix multiplication method for finding Page rank, the linear sum of incoming connection weights is the value after one iteration. Intuitively then, the faster the computation of Page rank converges, the more similar it should be to the first approximation. Graphs with short mixing times, or graphs that are good expanders, should converge rapidly in the Page rank computation, since it is equivalent to the rate of random walk mixing. Alternatively, the rate of Page rank convergence is related to the ratio of the first two graph eigenvalues, and is therefore related to various notions of graph expansion. Therefore, Page rank is more useful for analyzing graphs that are poorly connected. Since random graphs are good expanders with high probability, we would not expect Page rank to be particularly more informative than the incoming edge weights in random graphs.

The Brain is not the Internet. The Brain probably isn't even a random graph. The existence of modulatory and inhibitory synapses confuses some of the graph metrics designed to analyze information flow in purely excitatory feed-forward systems.