52 Commits

SHA1 Message Date
24d7820165 Replace niv with flakes 2021-11-02 16:11:17 +01:00
3d8e0fe114 Bump nixpkgs revision 2021-07-06 17:26:52 +02:00
2058fc96d7 Consider the read start in the Cys location 2021-05-15 17:49:39 +02:00
e4189cab01 Choose the normal phenotype sequence for TRBJ2-2 2021-05-15 17:36:58 +02:00
2acec89f84 Rename output file to curesim-HVR.fastq 2021-05-14 20:01:56 +02:00
91b3e37bd8 Start j_alignment with the portion after the Cys 2021-05-13 19:06:58 +02:00
bf33b65191 Convert org mode README to markdown 2021-05-05 12:39:07 +02:00
e8f03189c2 Document the alignment script 2021-05-04 19:25:11 +02:00
f4b7a41599 Document the repertoire script 2021-05-04 18:34:28 +02:00
9e8beefd38 Remove redundant directories 2021-05-04 11:13:25 +02:00
40205706e1 Bump nixpkgs revision 2021-05-04 02:28:12 +02:00
8ffa86a965 Elaborate on the project description in the README 2021-05-04 02:01:10 +02:00
1f7b40d224 Remove redundant JDK dependency 2021-05-04 01:57:34 +02:00
ad8abcc4fc Add usage instructions to the README 2021-05-04 01:28:49 +02:00
6440816a87 Remove imperative installation instructions 2021-05-04 00:59:05 +02:00
0e005735bc Create a Nix derivation for CuReSim 2021-05-04 00:57:35 +02:00
4f0936718b Add installation instruction to README 2021-05-03 23:27:19 +02:00
1b6e2d13ea Remove development dependencies 2021-05-03 23:22:16 +02:00
36eb73b458 Add alignment to generation script 2021-05-03 21:51:48 +02:00
81a57657fe Fix HVR end position computation 2021-05-03 21:51:32 +02:00
5afe040592 Isolate HVR sequence and save it to a file 2021-05-03 21:15:40 +02:00
c250c139dd Implement cysteine location in v_alignment 2021-04-27 19:34:01 +02:00
4dec2061fc Generate FASTQ files from the simulated repertoire 2021-04-22 13:59:45 +02:00
4adb92e901 Export original CDR3 to a file 2021-04-22 11:54:58 +02:00
83819b296b Save vj_sequences in a dataframe 2021-04-22 01:18:25 +02:00
a7c1df5ce2 Refactor get_vj_sequence function 2021-04-22 01:17:35 +02:00
81ebd4fbbe Rename function arguments to improve readability 2021-04-21 22:12:29 +02:00
659f0097d8 Get V and J sequences from sequence ID 2021-04-21 21:29:03 +02:00
fb5d781c66 Add space to sequence ID for easier parsing 2021-04-21 21:02:56 +02:00
35406497a3 Format generation script 2021-04-21 20:11:56 +02:00
b771071974 Remove csv from gitignore 2021-04-21 20:11:32 +02:00
2a997a3e5c Rename sequencing_runs to number_of_reads 2021-04-21 20:09:02 +02:00
1020d610d3 Run CuReSim n times for each sequence 2021-04-21 20:00:13 +02:00
5154a35fca Remove sequencing runs argument from repertoire 2021-04-21 19:59:38 +02:00
18ffbf9a75 Add v_call and j_call to sequence ID 2021-04-21 18:51:08 +02:00
82fdfdc6b9 Exchange pattern and subject in the alignment 2021-04-08 18:31:50 +02:00
dd9f7ffde4 Remove redundant HVR sequence construction 2021-04-07 19:49:44 +02:00
e694ee3292 Select the first sequence matching the identifier 2021-04-07 18:41:14 +02:00
e5a7b726a9 Add v_segments and j_segments objects 2021-04-07 18:32:58 +02:00
38b35f7d12 Align full sequences efficiently 2021-04-07 18:31:39 +02:00
f81e4af94e Amplify VDJ sequences to simplify parsing 2021-03-29 22:57:36 +02:00
576597cb04 Remove redundant sequencing runs argument 2021-03-29 20:40:01 +02:00
13f453718d Implement HVR sequence alignment 2021-03-27 09:39:59 +01:00
3a10380d8c Construct a dataframe containing the HVR region 2021-03-25 21:53:49 +01:00
8f5b9ee698 Parse curesim and VDJ sequences from files 2021-03-23 20:54:31 +01:00
66b39485a9 Save vdj alignment sequences to a CSV 2021-03-23 19:35:10 +01:00
97b8914cd5 Add literate programming notebook 2021-03-23 18:24:12 +01:00
bc666a37c7 Delete CuReSim log file after execution 2021-03-11 21:28:58 +01:00
d5bf8f3b72 Remove redundant casting 2021-03-11 21:28:00 +01:00
762791829a Save v_call and j_call to a text file 2021-03-11 21:03:16 +01:00
7b15df7614 Simplify repertoire generation 2021-03-10 12:34:20 +01:00
f4a001f821 Change CuReSim output file extension to FASTQ 2021-03-10 12:33:25 +01:00
15 changed files with 350 additions and 249 deletions

.gitignore (vendored, 3 lines changed)

@@ -1,2 +1 @@
*.txt
*.fasta
*.fastq

README.md (new file, 68 lines added)

@@ -0,0 +1,68 @@
# locigenesis
locigenesis is a tool that generates a human T-cell receptor (TCR), runs
it through a read simulator and extracts the CDR3 region.
The goal of this project is to generate HVR sequences both with and
without sequencing errors, in order to create datasets for a machine
learning algorithm.
## Technologies
- [immuneSIM](https://github.com/GreiffLab/immuneSIM/): in silico
generation of human and mouse BCR and TCR repertoires
- [CuReSim](http://www.pegase-biosciences.com/curesim-a-customized-read-simulator/):
read simulator that mimics Ion Torrent sequencing
## Installation
This project uses [Nix](https://nixos.org/) to ensure reproducible
builds.
1. Install Nix (compatible with macOS, Linux and
[WSL](https://docs.microsoft.com/en-us/windows/wsl/about)):
```bash
curl -L https://nixos.org/nix/install | sh
```
2. Clone the repository:
```bash
git clone https://git.coolneng.duckdns.org/coolneng/locigenesis
```
3. Change the working directory to the project:
```bash
cd locigenesis
```
4. Enter the nix-shell:
```bash
nix-shell
```
After running these commands, you will find yourself in a shell that
contains all the needed dependencies.
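As an optional sanity check (a sketch, not part of the repository's documented steps), the following confirms that the shell exposes the R packages and the CuReSim wrapper declared in `shell.nix`:
```bash
# Confirm the R packages provided by shell.nix load correctly
Rscript -e 'library(immuneSIM); library(Biostrings)'
# Confirm the CuReSim wrapper is on the PATH
command -v CuReSim
```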
## Usage
An execution script that accepts 2 parameters is provided; it is invoked
with the following command:
```bash
./generation.sh <number of sequences> <number of reads>
```
- \<number of sequences\>: an integer that specifies the number of
different sequences to generate
- \<number of reads\>: an integer that specifies the number of reads
to perform on each sequence
The script will generate 2 files under the data directory:
| HVR.fastq | curesim-HVR.fastq |
|:---------:|:-----------------:|
| Contains the original CDR3 sequence | Contains CDR3 after the read simulation, with sequencing errors |
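For example (the parameter values below are illustrative, not taken from the repository), the following run generates 100 distinct sequences and simulates 5 reads of each:
```bash
# Writes data/HVR.fastq (original CDR3) and data/curesim-HVR.fastq (simulated reads)
./generation.sh 100 5
```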

README.org (deleted)

@@ -1,3 +0,0 @@
* locigenesis
locigenesis is a tool that generates an immune repertoire and runs it through a sequence reader simulation tool, to generate sequencing errors.

data/j_segments_phe.rds (new binary file, not shown)

data/v_segments.rds (new binary file, not shown)

flake.lock (generated, new file, 41 lines added)

@@ -0,0 +1,41 @@
{
"nodes": {
"flake-utils": {
"locked": {
"lastModified": 1634851050,
"narHash": "sha256-N83GlSGPJJdcqhUxSCS/WwW5pksYf3VP1M13cDRTSVA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c91f3de5adaf1de973b797ef7485e441a65b8935",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1635865339,
"narHash": "sha256-fmI8PxMmL7WXV/O8m6vT9/yW42buxvAYeRNpcABvnKs=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "26a56abd090ec5c8f4c6c9e1189fbfa4bcb8db3f",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 13 lines added)

@@ -0,0 +1,13 @@
{
description = ''
locigenesis is a tool that generates a human T-cell receptor (TCR), runs
it through a sequence reader simulation tool and extracts CDR3.
'';
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = { self, nixpkgs, flake-utils }:
flake-utils.lib.eachDefaultSystem (system:
let pkgs = nixpkgs.legacyPackages.${system};
in { devShell = import ./shell.nix { inherit pkgs; }; });
}
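Since the flake's `devShell` imports `./shell.nix`, the same environment can also be entered with the flakes CLI; a hedged example, assuming a Nix version with the experimental flakes feature enabled:
```bash
# Enter the development shell defined by flake.nix
nix develop
# Or run a single command inside it
nix develop --command Rscript --version
```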

generation.sh (modified)

@@ -1,7 +1,7 @@
#!/bin/sh
usage() {
echo "usage: generation.sh <number of sequences> <sequencing runs>"
echo "usage: generation.sh <number of sequences> <number of reads>"
exit 1
}
@@ -10,12 +10,13 @@ if [ $# != 2 ]; then
fi
sequences=$1
sequencing_runs=$2
read_mean_size=350
read_variance_size=0.0
number_of_reads=$2
data_directory="data/"
file="sequence.fasta"
fastq=".fastq"
filename="sequence"
prefix="curesim_"
Rscript src/repertoire.r "$sequences" "$sequencing_runs"
java -jar tools/CuReSim.jar -m "$read_mean_size" -sd "$read_variance_size" -f "$data_directory$file" -o "$data_directory$prefix$file"
Rscript src/repertoire.r "$sequences" "$number_of_reads" &&
CuReSim -f "$data_directory$filename$fastq" -o "$data_directory$prefix$filename$fastq"
Rscript src/alignment.r
rm "$data_directory/log.txt"

nix/sources.json (deleted)

@@ -1,26 +0,0 @@
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "af958e8057f345ee1aca714c1247ef3ba1c15f5e",
"sha256": "1qjavxabbrsh73yck5dcq8jggvh3r2jkbr6b5nlz5d9yrqm9255n",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/af958e8057f345ee1aca714c1247ef3ba1c15f5e.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "release-20.09",
"description": "Nix Packages collection",
"homepage": "",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "6f1ce38d0c0b1b25727d86637fd2f3baf7b0f1f6",
"sha256": "16da722vqn96k1scls8mr8l909hl66r7y4ik6sad4ms3vmxbkbb3",
"type": "tarball",
"url": "https://github.com/NixOS/nixpkgs/archive/6f1ce38d0c0b1b25727d86637fd2f3baf7b0f1f6.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix (deleted)

@@ -1,174 +0,0 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; name = name'; }
else
pkgs.fetchurl { inherit (spec) url sha256; name = name'; };
fetch_tarball = pkgs: name: spec:
let
name' = sanitizeName name + "-src";
in
if spec.builtin or true then
builtins_fetchTarball { name = name'; inherit (spec) url sha256; }
else
pkgs.fetchzip { name = name'; inherit (spec) url sha256; };
fetch_git = name: spec:
let
ref =
if spec ? ref then spec.ref else
if spec ? branch then "refs/heads/${spec.branch}" else
if spec ? tag then "refs/tags/${spec.tag}" else
abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!";
in
builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; };
fetch_local = spec: spec.path;
fetch_builtin-tarball = name: throw
''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=tarball -a builtin=true'';
fetch_builtin-url = name: throw
''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`.
$ niv modify ${name} -a type=file -a builtin=true'';
#
# Various helpers
#
# https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695
sanitizeName = name:
(
concatMapStrings (s: if builtins.isList s then "-" else s)
(
builtins.split "[^[:alnum:]+._?=-]+"
((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name)
)
);
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: system:
let
sourcesNixpkgs =
import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; };
hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
hasThisAsNixpkgsPath = <nixpkgs> == ./.;
in
if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
import <nixpkgs> {}
else
abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs name spec
else if spec.type == "tarball" then fetch_tarball pkgs name spec
else if spec.type == "git" then fetch_git name spec
else if spec.type == "local" then fetch_local spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball name
else if spec.type == "builtin-url" then fetch_builtin-url name
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# If the environment variable NIV_OVERRIDE_${name} is set, then use
# the path directly as opposed to the fetched source.
replace = name: drv:
let
saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name;
ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}";
in
if ersatz == "" then drv else
# this turns the string into an actual Nix path (for both absolute and
# relative paths)
if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295
range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1);
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257
stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1));
# https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269
stringAsChars = f: s: concatStrings (map f (stringToCharacters s));
concatMapStrings = f: list: concatStrings (map f list);
concatStrings = builtins.concatStringsSep "";
# https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331
optionalAttrs = cond: as: if cond then as else {};
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, name ? null, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; }))
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = replace name (fetch config.pkgs name spec); }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null
, sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile)
, system ? builtins.currentSystem
, pkgs ? mkPkgs sources system
}: rec {
# The sources, i.e. the attribute set of spec name to spec
inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
inherit pkgs;
};
in
mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }

shell.nix (modified)

@@ -1,15 +1,29 @@
{ sources ? import ./nix/sources.nix, pkgs ? import sources.nixpkgs { } }:
{ pkgs ? import <nixpkgs> { } }:
with pkgs;
mkShell {
buildInputs = [
R
rPackages.immuneSIM
rPackages.Biostrings
jdk
# Development tools
rPackages.languageserver
rPackages.lintr
];
let
CuReSim = stdenv.mkDerivation rec {
name = "CuReSim";
version = "1.3";
src = fetchzip { url =
"http://www.pegase-biosciences.com/wp-content/uploads/2015/08/${name}${version}.zip";
sha256 = "1hvlpgy4haqgqq52mkxhcl9i1fx67kgwi6f1mijvqzk0xff77hkp";
stripRoot = true;
extraPostFetch = ''
chmod go-w $out
'';
};
nativeBuildInputs = [ makeWrapper ];
installPhase = ''
mkdir -pv $out/share/java $out/bin
cp -r ${src} $out/share/java/${name}
makeWrapper ${jre}/bin/java $out/bin/CuReSim --add-flags "-jar $out/share/java/${name}/${name}.jar"
'';
};
in mkShell {
buildInputs =
[ R rPackages.immuneSIM rPackages.Biostrings rPackages.stringr CuReSim ];
}
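A minimal sketch of invoking the new `CuReSim` wrapper directly from the development shell, assuming the `-f`/`-o` flags that `generation.sh` passes and that `src/repertoire.r` has already produced `data/sequence.fastq`:
```bash
# The wrapper launches java -jar .../CuReSim.jar with the supplied flags
CuReSim -f data/sequence.fastq -o data/curesim_sequence.fastq
```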

src/alignment.r (new file, 153 lines added)

@@ -0,0 +1,153 @@
library(Biostrings)
library(parallel)
#' Import and process the TCR and VJ sequences
#'
#' @param file A file path with the sequences after applying a read simulator
#' @return A \code{list} with the TCR sequences and VJ sequences
parse_data <- function(file) {
reversed_sequences <- Biostrings::readQualityScaledDNAStringSet(file)
sequences <- Biostrings::reverseComplement(reversed_sequences)
vj_segments <- union(
readRDS("data/v_segments.rds"),
readRDS("data/j_segments_phe.rds")
)
return(list(sequences, vj_segments))
}
#' Extracts the VJ metadata from the sequences read identifier
#'
#' @param metadata The read identifier of a sequence
#' @return A \code{list} with the V and J gene identifier
parse_metadata <- function(metadata) {
id_elements <- unlist(strsplit(metadata, split = " "))
v_identifier <- id_elements[2]
j_identifier <- id_elements[3]
return(list(v_id = v_identifier, j_id = j_identifier))
}
#' Fetches the sequence that matches the VJ gene identifier
#'
#' @param names The names of the VJ sequences
#' @param vdj_segments A \code{DNAStringSet} containing the VJ sequences
#' @param id The read identifier of a sequence
#' @return A \code{character} containing the gene sequence
match_id_sequence <- function(names, vdj_segments, id) {
matches <- grep(names, pattern = id)
if(id == "TRBJ2-2"){
row <- matches[2]
} else {
row <- matches[1]
}
return(as.character(vdj_segments[row]))
}
#' Gets the V and J sequences for a particular read identifier
#'
#' @param metadata The read identifier of a sequence
#' @param names The names of the VJ sequences
#' @param vdj_segments A \code{DNAStringSet} containing the VJ sequences
#' @return A \code{list} with the V and J sequences
get_vj_sequence <- function(metadata, names, vdj_segments) {
identifiers <- parse_metadata(metadata)
v_sequence <- match_id_sequence(names, vdj_segments, id = identifiers["v_id"])
j_sequence <- match_id_sequence(names, vdj_segments, id = identifiers["j_id"])
return(list(v_seq = v_sequence, j_seq = j_sequence))
}
#' Obtains the VJ sequences for all the TCR sequences
#'
#' @param sequences A \code{QualityScaledDNAStringSet} with the TCR sequences
#' @param vdj_segments A \code{DNAStringSet} containing the VJ sequences
#' @return A \code{data.frame} with the V and J sequences
fetch_vj_sequences <- function(sequences, vdj_segments) {
vj_sequences <- sapply(names(sequences),
names(vdj_segments),
vdj_segments,
FUN = get_vj_sequence
)
results <- data.frame(t(vj_sequences))
return(results)
}
#' Perform a pairwise alignment of a sequence with the canonical V or J sequence
#'
#' @param sequence A \code{DNAString} containing the TCR sequences
#' @param vdj_segment A \code{DNAString} containing the V or J sequence
#' @return A \code{PairwiseAlignments}
align_sequence <- function(sequence, vdj_segment) {
return(Biostrings::pairwiseAlignment(
subject = sequence,
pattern = vdj_segment,
type = "global-local",
gapOpening = 1
))
}
#' Computes the coordinate shift of the Cysteine due to indels
#'
#' @param insertion An \code{IRanges} containing the insertions
#' @param deletion An \code{IRanges} containing the deletions
#' @param cys A \code{list} with the Cysteine coordinates
#' @param alignment A \code{PairwiseAlignments}
#' @return A \code{list} with the delta of the Cysteine coordinates
handle_indels <- function(insertion, deletion, cys, alignment) {
ins_start <- sum(Biostrings::width(deletion[start(deletion) <= cys$start]))
ins_end <- sum(Biostrings::width(deletion[end(deletion) <= cys$end]))
shift_num <- c(0, cumsum(Biostrings::width(insertion))[-length(ins_start)])
shifted_ins <- IRanges::shift(insertion, shift_num)
gaps <- sum(width(shifted_ins[end(shifted_ins) < cys$start + ins_start])) +
nchar(stringr::str_extract(alignedSubject(alignment), "^-*"))
return(list("start" = ins_start - gaps, "end" = ins_end - gaps))
}
#' Find the coordinates of the first Cysteine of the HVR
#'
#' @param alignment A \code{PairwiseAlignments}
#' @return A \code{list} with the Cysteine coordinates
get_cys_coordinates <- function(alignment) {
cys <- list("start" = 310, "end" = 312)
insertion <- unlist(Biostrings::insertion(alignment))
deletion <- unlist(Biostrings::deletion(alignment))
delta_coordinates <- handle_indels(insertion, deletion, cys, alignment)
read_start <- unlist(start(Biostrings::Views(alignment)))
cys_start <- cys$start + delta_coordinates$start + read_start - 1
cys_end <- cys$end + delta_coordinates$end + read_start
return(list("start" = cys_start, "end" = cys_end))
}
#' Delimit the hypervariable region (HVR) for each TCR sequence
#'
#' @param sequences A \code{QualityScaledDNAStringSet} with the TCR sequences
#' @param vdj_segments A \code{DNAStringSet} containing the VJ sequences
#' @param cores Number of cores to apply multiprocessing
#' @return A \code{QualityScaledDNAStringSet} containing the HVR
get_hvr_sequences <- function(sequences, vdj_segments, cores = detectCores()) {
df <- fetch_vj_sequences(sequences, vdj_segments)
v_alignment <- parallel::mcmapply(sequences,
df$v_seq,
FUN = align_sequence,
mc.cores = cores
)
cys_coordinates <- parallel::mclapply(v_alignment, FUN = get_cys_coordinates)
cys_df <- as.data.frame(do.call(rbind, cys_coordinates))
remaining <- Biostrings::subseq(sequences, start = unlist(cys_df$end) + 1)
j_alignment <- parallel::mcmapply(remaining,
df$j_seq,
FUN = align_sequence,
mc.cores = cores
)
j_start <- parallel::mclapply(
j_alignment,
function(x) start(Biostrings::Views(x)),
mc.cores = cores
)
hvr_start <- unlist(cys_df$start)
hvr_end <- unlist(cys_df$start) + unlist(j_start) + 2
hvr <- Biostrings::subseq(sequences, start = hvr_start, end = hvr_end)
return(hvr)
}
data <- parse_data(file = "data/curesim_sequence.fastq")
hvr <- get_hvr_sequences(sequences = data[[1]], vdj_segments = data[[2]])
Biostrings::writeXStringSet(hvr, "data/curesim-HVR.fastq", format = "fastq")
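Once `data/curesim_sequence.fastq` exists, the alignment step can also be run on its own, mirroring the call made by `generation.sh`:
```bash
# Extract the HVR from the simulated reads and write data/curesim-HVR.fastq
Rscript src/alignment.r
```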

src/repertoire.r (modified)

@@ -1,42 +1,57 @@
library(immuneSIM)
library(Biostrings)
#' Generate the beta chain of a human T-cell receptor (TCR)
#'
#' @param number_of_sequences Number of different sequences to generate
#' @return A \code{data.frame} with the sequences, V and J genes and CDR3
generate_repertoire <- function(number_of_sequences) {
b_chain <- immuneSIM(
return(immuneSIM(
number_of_seqs = number_of_sequences,
species = "hs",
receptor = "tr",
chain = "b",
verbose = TRUE
chain = "b"
))
}
#' Saves the sequences and CDR3 to FASTQ files
#'
#' @param data A \code{data.frame} with the preprocessed TCR sequences and CDR3
save_data <- function(data) {
Biostrings::writeXStringSet(data$sequence,
"data/sequence.fastq",
format = "fastq"
)
return(b_chain)
Biostrings::writeXStringSet(data$junction, "data/HVR.fastq", format = "fastq")
}
# TODO save also v_call and j_call
preprocess_data <- function(repertoire, sequencing_runs) {
sequences <- as.character(repertoire$sequence)
reads <- Biostrings::DNAStringSet(rep(sequences, sequencing_runs))
names(reads) <- seq_len(length(reads))
reverse_complement <- Biostrings::reverseComplement(reads)
return(reverse_complement)
}
save_data <- function(repertoire) {
file_name <- "data/sequence.fasta"
Biostrings::writeXStringSet(repertoire, file_name, format = "fasta")
}
parse_cli_arguments <- function(args) {
if (length(args) != 2) {
stop("usage: repertoire.r <number of sequences> <sequencing_runs>")
}
return(c(args[1], args[2]))
#' Applies the reverse complement and amplifies the number of sequences
#'
#' @param data A \code{data.frame} containing the TCR sequences and CDR3
#' @param reads Number of times to amplify each sequence
#' @return A \code{data.frame} with reverse complement sequences and VJ metadata
process_data <- function(data, reads) {
dna_sequence <- Biostrings::DNAStringSet(data$sequence)
data$sequence <- Biostrings::reverseComplement(dna_sequence)
names(data$sequence) <- paste(rownames(data), data$v_call, data$j_call, " ")
data$junction <- Biostrings::DNAStringSet(data$junction)
names(data$junction) <- rownames(data)
amplified_data <- data[rep(seq_len(nrow(data)), reads), ]
return(amplified_data)
}
#' Checks the number of command line arguments and captures them
#'
#' @return A \code{vector} containing the command line arguments
parse_cli_arguments <- function() {
args <- commandArgs(trailingOnly = TRUE)
arguments <- parse_cli_arguments(args)
number_of_sequences <- as.integer(arguments[1])
sequencing_runs <- as.integer(arguments[2])
repertoire <- generate_repertoire(number_of_sequences)
processed_data <- preprocess_data(repertoire, sequencing_runs)
save_data(processed_data)
if (length(args) != 2) {
stop("usage: repertoire.r <number of sequences> <number of reads>")
}
return(args)
}
args <- parse_cli_arguments()
repertoire <- generate_repertoire(number_of_sequences = as.integer(args[1]))
data <- process_data(data = repertoire, reads = args[2])
save_data(data)
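The repertoire step can likewise be invoked directly, matching the usage string enforced by `parse_cli_arguments` (the parameter values are illustrative):
```bash
# Generate 100 sequences, amplify each to 5 reads, and write data/sequence.fastq and data/HVR.fastq
Rscript src/repertoire.r 100 5
```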