Skip to main content
  • Home
  • Development
  • Documentation
  • Donate
  • Operational login
  • Browse the archive

swh logo
SoftwareHeritage
Software
Heritage
Archive
Features
  • Search

  • Downloads

  • Save code now

  • Add forge now

  • Help

https://github.com/maartenpaul/DBD_tracking
08 February 2026, 18:47:12 UTC
  • Code
  • Branches (2)
  • Releases (0)
  • Visits
Revision 0a23528a842120c416d6e5e62f3036936363a601 authored by maartenpaul on 07 April 2017, 10:46:31 UTC, committed by GitHub on 07 April 2017, 10:46:31 UTC
Create README.md
1 parent 0ab42d8
  • Files
  • Changes
    • Branches
    • Releases
    • HEAD
    • refs/heads/development
    • refs/heads/master
    • 0a23528a842120c416d6e5e62f3036936363a601
    No releases to show
  • 64e5348
  • /
  • R
  • /
  • MSD analysis v2.0.R
Raw File Download Save again
Take a new snapshot of a software origin

If the archived software origin currently browsed is not synchronized with its upstream version (for instance when new commits have been issued), you can explicitly request Software Heritage to take a new snapshot of it.

Use the form below to proceed. Once a request has been submitted and accepted, it will be processed as soon as possible. You can then check its processing state by visiting this dedicated page.
swh spinner

Processing "take a new snapshot" request ...

To reference or cite the objects present in the Software Heritage archive, permalinks based on SoftWare Hash IDentifiers (SWHIDs) must be used.
Select below a type of object currently browsed in order to display its associated SWHID and permalink.

  • revision
  • directory
  • content
  • snapshot
origin badgerevision badge
swh:1:rev:0a23528a842120c416d6e5e62f3036936363a601
origin badgedirectory badge
swh:1:dir:69f5520d7b33d36178e01d5221b42f4c1637254b
origin badgecontent badge
swh:1:cnt:e953f7d1033e170890b6c52ecf515425c5e2d760
origin badgesnapshot badge
swh:1:snp:f14cfe52ad9e74434b7288f4e59ba10f11fe3d0f

This interface enables to generate software citations, provided that the root directory of browsed objects contains a citation.cff or codemeta.json file.
Select below a type of object currently browsed in order to generate citations for them.

  • revision
  • directory
  • content
  • snapshot
(requires biblatex-software package)
Generating citation ...
(requires biblatex-software package)
Generating citation ...
(requires biblatex-software package)
Generating citation ...
(requires biblatex-software package)
Generating citation ...
Tip revision: 0a23528a842120c416d6e5e62f3036936363a601 authored by maartenpaul on 07 April 2017, 10:46:31 UTC
Create README.md
Tip revision: 0a23528
MSD analysis v2.0.R
##notes
#interesting statistics
#- MSD points, MSD fit
#- mean speed
# filter for longer tracks


library(plyr)

##Functions
MSD <- function(tracks,n=5,framerate=30,pxsize=100){
  # Mean squared displacement per track for time lags 1..n (in frames).
  #
  # tracks:    data.frame whose first three columns are frame, X, Y (X/Y in
  #            pixels) and which contains a "track" column used for grouping.
  # n:         number of time lags for which the MSD is computed.
  # framerate: currently unused here; lag times stay expressed in frames.
  # pxsize:    pixel size in nm; coordinates are converted to micrometers.
  #
  # Returns a data.frame (one row per track) with the track id plus n columns
  # holding MSD(lag) in um^2. Tracks with fewer than n+2 localizations are
  # dropped (the per-track function returns NULL, which ddply discards).

  # convert pixel coordinates to micrometers
  tracks[,2] <- (tracks[,2]*pxsize)/1000
  tracks[,3] <- (tracks[,3]*pxsize)/1000

  coef <- ddply(tracks,.variables = "track",.fun= function(x) {
    # put first frame of track at one
    x[,1] <- x[,1]-x[1,1]+1
    if (nrow(x) <= n+1) {
      return(NULL)  # too short for a reliable MSD at lag n
    }
    result <- vector(length = n)
    for (j in seq_len(n)) {
      # For every localization, find the one j frames later. Frames are
      # assumed unique within a track, so match() is equivalent to the
      # original which(x[,1] == x[k,1] + j) scan, but vectorized.
      partner <- match(x[,1]+j, x[,1])
      ok <- !is.na(partner)
      n_sum <- sum(ok)
      sq_disp <- (x[ok,2]-x[partner[ok],2])^2 + (x[ok,3]-x[partner[ok],3])^2
      # n_sum == 0 yields 0/0 = NaN, matching the original accumulation
      result[j] <- sum(sq_disp)/n_sum
    }
    return(result)
  })
  return(coef)
}

MSD_fit <- function(x,n=5,fitzero=T,framerate=1/33,pxsize=100,offset=0.01){
  # Fit the free-diffusion model MSD(t) = 4*D*t (through the origin) to the
  # per-track MSD curves produced by MSD().
  #
  # x:         data.frame from MSD(): first column the track id, remaining
  #            columns the MSD values at lags 1..n.
  # fitzero:   if FALSE, `offset` is subtracted from the MSD values before
  #            fitting (localization-error correction).
  # framerate: frames per ms; lag index is converted to seconds via
  #            time/framerate/1000.
  # pxsize:    unused here (coordinates were already scaled in MSD()).
  # offset:    constant MSD offset (um^2) removed when fitzero is FALSE.
  #
  # Returns a data.frame with columns track, D (um^2/s), intercept
  # (hard-coded 0.02 placeholder) and Rsquared; rows where the fit fails
  # contain NA.

  #apply function for every track (one row of x each)
  coef <- adply(x,.margins=1, .expand=FALSE,function(x) {
    mst <- t(x[,-1])                 # MSD values of this track
    time <- seq(1,length(mst),1)
    time <- time/framerate/1000      # convert lag (frames) to seconds

    if(!fitzero){
      mst <- mst-offset
    }

    # linear fit through the origin: MSD = slope * time
    fit <- tryCatch(lm(formula = mst ~ time-1), error=function(e) NULL)
    if (!is.null(fit)){
      RSS.p <- sum(residuals(fit)^2)
      TSS <- sum((mst - mean(mst,na.rm = T))^2,na.rm = T)
      rsq <- 1-(RSS.p/TSS)
      coef <- c(coef(fit),0.02,rsq)
    } else {
      # BUG FIX: a failed fit previously left `coef` unbound, so the
      # division below indexed the stats::coef closure and errored
      # ("object of type 'closure' is not subsettable").
      coef <- c(NA,NA,NA)
    }

    # MSD = 4*D*t, so the diffusion coefficient is slope/4
    coef[1] <- coef[1]/4
    return(coef)
  })
  names(coef) <- c("track","D","intercept","Rsquared")
  return(coef)
}

mean_speed <- function(x,framerate=30,pxsize=100){
  # Mean squared displacement rate per track (um^2 per time unit), averaged
  # over all steps of the track.
  #
  # x:         data.frame with columns frame, X, Y (pixels) and a "track"
  #            column for grouping.
  # framerate: multiplier applied to the frame difference of each step.
  # pxsize:    pixel size in nm; coordinates are scaled to micrometers.
  #
  # Returns a data.frame with columns trackid and speed.

  #scale to um
  x[,2] <- (x[,2]*pxsize)/1000
  x[,3] <- (x[,3]*pxsize)/1000

  meansp <- ddply(x,.variables = "track",.fun= function(x) {
    # guard: a single-point track has no displacement (the original
    # 2:nrow(x) loop ran backwards and indexed out of bounds)
    if (nrow(x) < 2) {
      return(0)
    }
    dx <- diff(x$X)
    dy <- diff(x$Y)
    dt <- diff(x$frame)*framerate
    # BUG FIX: the squared displacement was not parenthesized before, so
    # only the Y term was divided by the time step
    sum((dx^2 + dy^2)/dt)/nrow(x)
  })
  names(meansp) <- c("trackid","speed")
  return(meansp)
}

segment_statistics <- function(segments){
  # Turning angle (degrees) between consecutive displacement vectors of each
  # track, grouped by cellID and track.
  #
  # segments: data.frame with X/Y coordinates in columns 2:3 plus cellID and
  #           track columns for grouping.
  #
  # Returns a list (one element per cellID/track) of angle vectors aligned
  # with the track's points; first and last entries are NA because an angle
  # needs a point on either side.
  result <- dlply(segments,.variables = c("cellID","track"),function(x){
    n_pts <- nrow(x)
    # BUG FIX: tracks with fewer than 3 points previously produced the
    # descending index sequence 1:(n-2) and crashed; return only NAs
    if (n_pts < 3) {
      return(rep(NA, n_pts))
    }
    seg_angle <- numeric(n_pts - 2)  # preallocate instead of growing
    for(i in seq_len(n_pts - 2)){
      A <- as.numeric(x[i,2:3])
      B <- as.numeric(x[i+1,2:3])
      C <- as.numeric(x[i+2,2:3])
      AB <- B-A
      CB <- C-B
      dAB <- sqrt(sum(AB^2))
      dBC <- sqrt(sum(CB^2))
      # angle between successive segments via the normalized dot product
      seg_angle[i] <- acos((AB%*%CB)/(dAB*dBC))*180/pi
    }
    c(NA,seg_angle,NA)
  })
  return(result)
}

chull_stat <- function(x, pxsize = 100){
  # Per-track shape statistics: convex-hull area/perimeter, principal-axis
  # spread/orientation and positional statistics.
  #
  # x:      data.frame with X/Y pixel coordinates in columns 2:3 and a
  #         "track" column for grouping.
  # pxsize: pixel size in nm. BUG FIX: this was previously read from an
  #         undefined global (the script defines `pixelsize`, not `pxsize`),
  #         which made the function error; it is now a defaulted parameter.
  #
  # Returns one row per track with columns N, meanX, meanY, sd, sdpri,
  # width, ratio, angle, chull_area and chull_perimeter.
  library(pracma)  # polyarea(), poly_length(); third-party dependency
  x[,2] <- (x[,2]*pxsize)/1000
  x[,3] <- (x[,3]*pxsize)/1000
  out <- ddply(x,.variables = "track",.fun= function(trk) {
    pts <- cbind(trk$X,trk$Y)
    hull <- chull(pts)  # hull vertex indices, clockwise
    # polyarea/poly_length expect counter-clockwise order, hence rev()
    area <- polyarea(pts[rev(hull),1], pts[rev(hull),2])
    perimeter <- poly_length(pts[rev(hull),1], pts[rev(hull),2])
    pca <- princomp(pts)
    # orientation of the first principal axis
    angle <- atan2(pca$loadings[2,1],pca$loadings[1,1])
    return(data.frame("N"=nrow(pts),
                      "meanX"=mean(pts[,1]),
                      "meanY"=mean(pts[,2]),
                      "sd"=((sd(pts[,1])+sd(pts[,2]))/2),
                      "sdpri"=((pca$sdev[1]+pca$sdev[2])/2),
                      "width"=(max(pca$scores[,1])-min(pca$scores[,1])),
                      "ratio"=(pca$sdev[1]/pca$sdev[2]),
                      "angle"=angle,
                      "chull_area"=area,
                      "chull_perimeter"=perimeter))
  })

  return(out)
}

##parameters
#directory <- "D:/Stack/experimental data/Tracking all_data/"
# NOTE(review): `directory` is assigned twice below — only the second
# assignment takes effect; the first line is dead. Confirm which dataset
# is intended.
directory <- "D:/Stack/experimental data/160325 RA54-SNAP tracking/200ms tracking"
directory <- "D:/Stack/experimental data/160415 tracking beads_SiR/"
framerate <- 1/200 #frames per ms (1/200 = one frame every 200 ms)
n <- 4 #number of timepoints taken into account for MSD fit
fitzero <- FALSE #should fit go through origin (0,0)
min_length <- 20 # minimum track length (NOTE(review): never used below — confirm)
pixelsize <- 100 #nm (NOTE(review): functions take `pxsize`; this global is not passed to them)
fitMSD <- F # if TRUE, also fit D and compute hull statistics per cell
offset <- 4*0.01^2 #33 nm led to a constant offset (r2(0)) in MSD of 0.0044 mm2



###
# Main processing loop: for every condition directory, read the filtered
# tracks of each cell, compute per-track MSD curves (optionally fit D and
# compute hull statistics), then pool and save results per condition.
condition_list <- list.dirs(directory,full.names = F,recursive = F)
#condition_list <- "SNAP-SiR"

for (i in seq_along(condition_list)){
  segments <- list()
  dir <- file.path(directory,condition_list[i])
  filelist <- list.dirs(dir,full.names = T,recursive = F)
  total <- length(filelist)
  # create progress bar
  pb <- txtProgressBar(min = 0, max = total, style = 3)

  D <- list()

  for(j in seq_len(total)){
    Sys.sleep(0.1)  # give the console time to redraw the progress bar
    setTxtProgressBar(pb, j)
    tracks_simple <- read.csv(file.path(filelist[j],"tracks.simple.filtered.txt"),
                              sep = "\t",header = F)
    names(tracks_simple) <- c("frame","X","Y","track","displacement",
                              "intensity","sigma","fit_error")
    segments[[basename(filelist[j])]] <- data.frame(tracks_simple,"cellID"=j)
    msd <- MSD(tracks = tracks_simple,n = n,framerate=framerate)
    if(fitMSD){
      # BUG FIX: `offset` was passed positionally and was matched to the
      # `pxsize` argument of MSD_fit(); pass it by name.
      coef <- MSD_fit(msd,n = n,fitzero = fitzero,framerate=framerate,
                      offset = offset)
      #result <- mean_speed(x = tracks_simple,framerate=framerate)
      stats <- chull_stat(x=tracks_simple)
      D[[basename(filelist[j])]] <- data.frame("cellID"=j,msd,coef)#,"speed"=result$speed,stats[,-1])
      # BUG FIX: this write previously ran unconditionally and crashed when
      # fitMSD was FALSE (`coef` not found) or reused a stale fit.
      write.csv(coef,paste(filelist[j],"D.txt",sep="_"))
    } else {
      # BUG FIX: previously this assignment also ran after the fitMSD
      # branch, silently overwriting the fitted results stored above.
      D[[basename(filelist[j])]] <- data.frame("cellID"=j,msd)
    }
  }
  segments <- ldply(segments)
  write.csv(segments,file = file.path(dir,"segments.txt"))
  save(segments,file=file.path(dir,"segments.R"))
  #load(file=file.path(dir,"segments.R"))
  D <- ldply(D)
  D$condition <- condition_list[i]
  save(D,file=file.path(dir,"D.R"))
  close(pb)
}

# Reload the per-condition results saved by the loop above and expose them
# as global variables named D_<condition> and segments_<condition>.
condlist <- list.dirs(directory,full.names = T,recursive = F)
for (i in 1:length(condlist)){
  load(file.path(condlist[i],"D.R"))           # restores `D`
  load(file.path(condlist[[i]],"segments.R"))  # restores `segments`
  # NOTE(review): assumes condlist and condition_list enumerate the same
  # directories in the same order — confirm before relying on the names.
  .GlobalEnv[[paste("D_",condition_list[i],sep="")]] <- D
  .GlobalEnv[[paste("segments_",condition_list[i],sep="")]] <- segments
}

# Pool the four expected conditions, keep positive diffusion coefficients,
# and plot their density on a log x-scale.
# NOTE(review): the D_WT_*/D_KA_* names are hard-coded and must match the
# condition directory names exactly; ggplot2 is used here but never loaded
# in this file — confirm library(ggplot2) is attached elsewhere.
allD <- rbind(D_WT_noIR,D_WT_IR,D_KA_noIR,D_KA_IR)
allD <- subset(allD,allD$D>0)
m <- ggplot(allD, aes(x=D,y=..density..,color=condition)) + xlim(c(0.000001,1E3))
m + geom_density() + scale_x_log10() 
The diff you're trying to view is too large. Only the first 1000 changed files have been loaded.
Showing with 0 additions and 0 deletions (0 / 0 diffs computed)
swh spinner

Computing file changes ...

back to top

Software Heritage — Copyright (C) 2015–2026, The Software Heritage developers. License: GNU AGPLv3+.
The source code of Software Heritage itself is available on our development forge.
The source code files archived by Software Heritage are available under their own copyright and licenses.
Terms of use: Archive access, API— Content policy— Contact— JavaScript license information— Web API