@Article{pmid19706884,
  Author = "Robins, H. S. and Campregher, P. V. and Srivastava, S. K.
            and Wacher, A. and Turtle, C. J. and Kahsai, O. and Riddell,
            S. R. and Warren, E. H. and Carlson, C. S.",
  Title = "{{C}omprehensive assessment of {T}-cell receptor beta-chain
           diversity in alphabeta {T} cells}",
  Journal = "Blood",
  Year = 2009,
  Volume = 114,
  Number = 19,
  Pages = "4099--4107",
  Month = "Nov"
}

@article{Nurk2021.05.26.445798,
  author = {Nurk, Sergey and Koren, Sergey and Rhie, Arang and
            Rautiainen, Mikko and Bzikadze, Andrey V. and Mikheenko, Alla
            and Vollger, Mitchell R. and Altemose, Nicolas and Uralsky,
            Lev and Gershman, Ariel and Aganezov, Sergey and Hoyt,
            Savannah J. and Diekhans, Mark and Logsdon, Glennis A. and
            Alonge, Michael and Antonarakis, Stylianos E. and Borchers,
            Matthew and Bouffard, Gerard G. and Brooks, Shelise Y. and
            Caldas, Gina V. and Cheng, Haoyu and Chin, Chen-Shan and Chow,
            William and de Lima, Leonardo G. and Dishuck, Philip C. and
            Durbin, Richard and Dvorkina, Tatiana and Fiddes, Ian T. and
            Formenti, Giulio and Fulton, Robert S. and Fungtammasan,
            Arkarachai and Garrison, Erik and Grady, Patrick G.S. and
            Graves-Lindsay, Tina A. and Hall, Ira M. and Hansen, Nancy F.
            and Hartley, Gabrielle A. and Haukness, Marina and Howe,
            Kerstin and Hunkapiller, Michael W. and Jain, Chirag and Jain,
            Miten and Jarvis, Erich D. and Kerpedjiev, Peter and Kirsche,
            Melanie and Kolmogorov, Mikhail and Korlach, Jonas and
            Kremitzki, Milinn and Li, Heng and Maduro, Valerie V. and
            Marschall, Tobias and McCartney, Ann M. and McDaniel, Jennifer
            and Miller, Danny E. and Mullikin, James C. and Myers, Eugene
            W. and Olson, Nathan D. and Paten, Benedict and Peluso, Paul
            and Pevzner, Pavel A. and Porubsky, David and Potapova, Tamara
            and Rogaev, Evgeny I. and Rosenfeld, Jeffrey A. and Salzberg,
            Steven L. and Schneider, Valerie A. and Sedlazeck, Fritz J.
            and Shafin, Kishwar and Shew, Colin J. and Shumate, Alaina and
            Sims, Yumi and Smit, Arian F. A. and Soto, Daniela C. and
            Sovi{\'c}, Ivan and Storer, Jessica M. and Streets, Aaron and
            Sullivan, Beth A. and Thibaud-Nissen, Fran{\c c}oise and
            Torrance, James and Wagner, Justin and Walenz, Brian P. and
            Wenger, Aaron and Wood, Jonathan M. D. and Xiao, Chunlin and
            Yan, Stephanie M. and Young, Alice C. and Zarate, Samantha and
            Surti, Urvashi and McCoy, Rajiv C. and Dennis, Megan Y. and
            Alexandrov, Ivan A. and Gerton, Jennifer L. and
            O{\textquoteright}Neill, Rachel J. and Timp, Winston and Zook,
            Justin M. and Schatz, Michael C. and Eichler, Evan E. and
            Miga, Karen H. and Phillippy, Adam M.},
  title = {The complete sequence of a human genome},
  elocation-id = {2021.05.26.445798},
  year = 2021,
  doi = {10.1101/2021.05.26.445798},
  publisher = {Cold Spring Harbor Laboratory},
  abstract = {In 2001, Celera Genomics and the International Human Genome
              Sequencing Consortium published their initial drafts of the
              human genome, which revolutionized the field of genomics.
              While these drafts and the updates that followed effectively
              covered the euchromatic fraction of the genome, the
              heterochromatin and many other complex regions were left
              unfinished or erroneous. Addressing this remaining 8\% of the
              genome, the Telomere-to-Telomere (T2T) Consortium has finished
              the first truly complete 3.055 billion base pair (bp) sequence
              of a human genome, representing the largest improvement to the
              human reference genome since its initial release. The new
              T2T-CHM13 reference includes gapless assemblies for all 22
              autosomes plus Chromosome X, corrects numerous errors, and
              introduces nearly 200 million bp of novel sequence containing
              2,226 paralogous gene copies, 115 of which are predicted to be
              protein coding. The newly completed regions include all
              centromeric satellite arrays and the short arms of all five
              acrocentric chromosomes, unlocking these complex regions of
              the genome to variational and functional studies for the first
              time. Competing Interest Statement: AF and CSC are employees of
              DNAnexus; IS, JK, MWH, PP, and AW are employees of Pacific
              Biosciences; FJS has received travel funds to speak at events
              hosted by Pacific Biosciences; SK and FJS have received travel
              funds to speak at events hosted by Oxford Nanopore
              Technologies. WT has licensed two patents to Oxford Nanopore
              Technologies (US 8748091 and 8394584).},
  URL = {https://www.biorxiv.org/content/early/2021/05/27/2021.05.26.445798},
  eprint = {https://www.biorxiv.org/content/early/2021/05/27/2021.05.26.445798.full.pdf},
  journal = {bioRxiv}
}

@ARTICLE{10.3389/fgene.2020.00900,
  AUTHOR = {Wang, Luotong and Qu, Li and Yang, Longshu and Wang, Yiying
            and Zhu, Huaiqiu},
  TITLE = {NanoReviser: An Error-Correction Tool for Nanopore
           Sequencing Based on a Deep Learning Algorithm},
  JOURNAL = {Frontiers in Genetics},
  VOLUME = 11,
  PAGES = 900,
  YEAR = 2020,
  URL = {https://www.frontiersin.org/article/10.3389/fgene.2020.00900},
  DOI = {10.3389/fgene.2020.00900},
  ISSN = {1664-8021},
  ABSTRACT = {Nanopore sequencing is regarded as one of the most
              promising third-generation sequencing (TGS) technologies.
              Since 2014, Oxford Nanopore Technologies (ONT) has developed a
              series of devices based on nanopore sequencing to produce very
              long reads, with an expected impact on genomics. However, the
              nanopore sequencing reads are susceptible to a fairly high
              error rate owing to the difficulty in identifying the DNA
              bases from the complex electrical signals. Although several
              basecalling tools have been developed for nanopore sequencing
              over the past years, it is still challenging to correct the
              sequences after applying the basecalling procedure. In this
              study, we developed an open-source DNA basecalling reviser,
              NanoReviser, based on a deep learning algorithm to correct the
              basecalling errors introduced by current basecallers provided
              by default. In our module, we re-segmented the raw electrical
              signals based on the basecalled sequences provided by the
              default basecallers. By employing convolution neural networks
              (CNNs) and bidirectional long short-term memory (Bi-LSTM)
              networks, we took advantage of the information from the raw
              electrical signals and the basecalled sequences from the
              basecallers. Our results showed NanoReviser, as a
              post-basecalling reviser, significantly improving the
              basecalling quality. After being trained on standard ONT
              sequencing reads from public E. coli and human NA12878
              datasets, NanoReviser reduced the sequencing error rate by
              over 5\% for both the E. coli dataset and the human dataset.
              The performance of NanoReviser was found to be better than
              those of all current basecalling tools. Furthermore, we
              analyzed the modified bases of the E. coli dataset and added
              the methylation information to train our module. With the
              methylation annotation, NanoReviser reduced the error rate by
              7\% for the E. coli dataset and specifically reduced the error
              rate by over 10\% for the regions of the sequence rich in
              methylated bases. To the best of our knowledge, NanoReviser is
              the first post-processing tool after basecalling to accurately
              correct the nanopore sequences without the time-consuming
              procedure of building the consensus sequence. The NanoReviser
              package is freely available at
              https://github.com/pkubioinformatics/NanoReviser.}
}

@article{HEATHER20161,
  title = {The sequence of sequencers: The history of sequencing DNA},
  journal = {Genomics},
  volume = 107,
  number = 1,
  pages = {1--8},
  year = 2016,
  issn = {0888-7543},
  doi = {10.1016/j.ygeno.2015.11.003},
  url = {https://www.sciencedirect.com/science/article/pii/S0888754315300410},
  author = {James M. Heather and Benjamin Chain},
  keywords = {DNA, RNA, Sequencing, Sequencer, History},
  abstract = {Determining the order of nucleic acid residues in
              biological samples is an integral component of a wide variety
              of research applications. Over the last fifty years large
              numbers of researchers have applied themselves to the
              production of techniques and technologies to facilitate this
              feat, sequencing DNA and RNA molecules. This time-scale has
              witnessed tremendous changes, moving from sequencing short
              oligonucleotides to millions of bases, from struggling towards
              the deduction of the coding sequence of a single gene to rapid
              and widely available whole genome sequencing. This article
              traverses those years, iterating through the different
              generations of sequencing technology, highlighting some of the
              key discoveries, researchers, and sequences along the way.}
}

@Article{vanDijk2014,
  author = {van Dijk, Erwin L. and Auger, H{\'e}l{\`e}ne and
            Jaszczyszyn, Yan and Thermes, Claude},
  title = {Ten years of next-generation sequencing technology},
  journal = {Trends in Genetics},
  year = 2014,
  month = {Sep},
  day = 01,
  publisher = {Elsevier},
  volume = 30,
  number = 9,
  pages = {418--426},
  issn = {0168-9525},
  doi = {10.1016/j.tig.2014.07.001},
  url = {https://doi.org/10.1016/j.tig.2014.07.001}
}

@article{Sanger5463,
  author = {Sanger, F. and Nicklen, S. and Coulson, A. R.},
  title = {DNA sequencing with chain-terminating inhibitors},
  volume = 74,
  number = 12,
  pages = {5463--5467},
  year = 1977,
  doi = {10.1073/pnas.74.12.5463},
  publisher = {National Academy of Sciences},
  abstract = {A new method for determining nucleotide sequences in DNA is
              described. It is similar to the {\textquotedblleft}plus and
              minus{\textquotedblright} method [Sanger, F. \& Coulson,
              A. R. (1975) J. Mol. Biol. 94, 441-448] but makes use of the
              2',3'-dideoxy and arabinonucleoside analogues of the normal
              deoxynucleoside triphosphates, which act as specific
              chain-terminating inhibitors of DNA polymerase. The technique
              has been applied to the DNA of bacteriophage ϕX174 and is more
              rapid and more accurate than either the plus or the minus
              method.},
  issn = {0027-8424},
  URL = {https://www.pnas.org/content/74/12/5463},
  eprint = {https://www.pnas.org/content/74/12/5463.full.pdf},
  journal = {Proceedings of the National Academy of Sciences}
}

@Article{InternationalHumanGenomeSequencingConsortium2004,
  author = {{International Human Genome Sequencing Consortium}},
  title = {Finishing the euchromatic sequence of the human genome},
  journal = {Nature},
  year = 2004,
  month = {Oct},
  day = 01,
  volume = 431,
  number = 7011,
  pages = {931--945},
  abstract = {The sequence of the human genome encodes the genetic
              instructions for human physiology, as well as rich information
              about human evolution. In 2001, the International Human Genome
              Sequencing Consortium reported a draft sequence of the
              euchromatic portion of the human genome. Since then, the
              international collaboration has worked to convert this draft
              into a genome sequence with high accuracy and nearly complete
              coverage. Here, we report the result of this finishing
              process. The current genome sequence (Build 35) contains 2.85
              billion nucleotides interrupted by only 341 gaps. It covers
              ∼99{\%} of the euchromatic genome and is accurate to an error
              rate of ∼1 event per 100,000 bases. Many of the remaining
              euchromatic gaps are associated with segmental duplications
              and will require focused work with new methods. The
              near-complete sequence, the first for a vertebrate, greatly
              improves the precision of biological analyses of the human
              genome including studies of gene number, birth and death.
              Notably, the human genome seems to encode only 20,000--25,000
              protein-coding genes. The genome sequence reported here should
              serve as a firm foundation for biomedical research in the
              decades ahead.},
  issn = {1476-4687},
  doi = {10.1038/nature03001},
  url = {https://doi.org/10.1038/nature03001}
}

@Article{Schloss2008,
  author = {Schloss, Jeffery A.},
  title = {How to get genomes at one ten-thousandth the cost},
  journal = {Nature Biotechnology},
  year = 2008,
  month = {Oct},
  day = 01,
  volume = 26,
  number = 10,
  pages = {1113--1115},
  abstract = {The NHGRI's Advanced DNA Sequencing Technology program is
              spearheading the development of platforms that will bring
              routine whole-genome sequencing closer to reality.},
  issn = {1546-1696},
  doi = {10.1038/nbt1008-1113},
  url = {https://doi.org/10.1038/nbt1008-1113}
}

@Article{Shugay2014,
  author = {Shugay, Mikhail and Britanova, Olga V. and Merzlyak,
            Ekaterina M. and Turchaninova, Maria A. and Mamedov, Ilgar Z.
            and Tuganbaev, Timur R. and Bolotin, Dmitriy A. and
            Staroverov, Dmitry B. and Putintseva, Ekaterina V. and
            Plevova, Karla and Linnemann, Carsten and Shagin, Dmitriy and
            Pospisilova, Sarka and Lukyanov, Sergey and Schumacher, Ton N.
            and Chudakov, Dmitriy M.},
  title = {Towards error-free profiling of immune repertoires},
  journal = {Nature Methods},
  year = 2014,
  month = {Jun},
  day = 01,
  volume = 11,
  number = 6,
  pages = {653--655},
  abstract = {A two-step error correction process for high
              throughput--sequenced T- and B-cell receptors allows the
              elimination of most errors while not diminishing the natural
              complexity of the repertoires.},
  issn = {1548-7105},
  doi = {10.1038/nmeth.2960},
  url = {https://doi.org/10.1038/nmeth.2960}
}

@Article{Ma2019,
  author = {Ma, Xiaotu and Shao, Ying and Tian, Liqing and Flasch,
            Diane A. and Mulder, Heather L. and Edmonson, Michael N. and
            Liu, Yu and Chen, Xiang and Newman, Scott and Nakitandwe, Joy
            and Li, Yongjin and Li, Benshang and Shen, Shuhong and Wang,
            Zhaoming and Shurtleff, Sheila and Robison, Leslie L. and
            Levy, Shawn and Easton, John and Zhang, Jinghui},
  title = {Analysis of error profiles in deep next-generation
           sequencing data},
  journal = {Genome Biology},
  year = 2019,
  month = {Mar},
  day = 14,
  volume = 20,
  number = 1,
  pages = 50,
  abstract = {Sequencing errors are key confounding factors for detecting
              low-frequency genetic variants that are important for cancer
              molecular diagnosis, treatment, and surveillance using deep
              next-generation sequencing (NGS). However, there is a lack of
              comprehensive understanding of errors introduced at various
              steps of a conventional NGS workflow, such as sample handling,
              library preparation, PCR enrichment, and sequencing. In this
              study, we use current NGS technology to systematically
              investigate these questions.},
  issn = {1474-760X},
  doi = {10.1186/s13059-019-1659-6}
}

@mastersthesis{BenítezCantos-Master,
  author = "María Soledad Benítez Cantos",
  title = "Análisis de repertorios de receptores de células T a partir de datos de secuenciación masiva",
  school = "Universidad de Granada",
  year = 2019,
  month = "Jul"
}

@inbook{abbas_lichtman_pillai_2017,
  address = {Philadelphia, PA},
  edition = {9th},
  booktitle = {Cellular and Molecular Immunology},
  publisher = {Elsevier},
  author = {Abbas, Abul K. and Lichtman, Andrew H. and Pillai, Shiv},
  year = 2017,
  pages = 204
}

@Article{CRICK1970,
  author = {Crick, Francis},
  title = {Central Dogma of Molecular Biology},
  journal = {Nature},
  year = 1970,
  month = {Aug},
  day = 01,
  volume = 227,
  number = 5258,
  pages = {561--563},
  abstract = {The central dogma of molecular biology deals with the
              detailed residue-by-residue transfer of sequential
              information. It states that such information cannot be
              transferred from protein to either protein or nucleic acid.},
  issn = {1476-4687},
  doi = {10.1038/227561a0},
  url = {https://doi.org/10.1038/227561a0}
}

@Article{Salk2018,
  author = {Salk, Jesse J. and Schmitt, Michael W. and Loeb, Lawrence A.},
  title = {Enhancing the accuracy of next-generation sequencing for
           detecting rare and subclonal mutations},
  journal = {Nature Reviews Genetics},
  year = 2018,
  month = {May},
  day = 01,
  volume = 19,
  number = 5,
  pages = {269--285},
  abstract = {The ability to identify low-frequency genetic variants
              among heterogeneous populations of cells or DNA molecules is
              important in many fields of basic science, clinical medicine
              and other applications, yet current high-throughput DNA
              sequencing technologies have an error rate between 1 per 100
              and 1 per 1,000 base pairs sequenced, which obscures their
              presence below this level. As next-generation sequencing
              technologies evolved over the decade, throughput has improved
              markedly, but raw accuracy has remained generally unchanged.
              Researchers with a need for high accuracy developed data
              filtering methods and incremental biochemical improvements
              that modestly improve low-frequency variant detection, but
              background errors remain limiting in many fields. The most
              profoundly impactful means for reducing errors, first
              developed approximately 7 years ago, has been the concept of
              single-molecule consensus sequencing. This entails redundant
              sequencing of multiple copies of a given specific DNA molecule
              and discounting of variants that are not present in all or
              most of the copies as likely errors. Consensus sequencing can
              be achieved by labelling each molecule with a unique molecular
              barcode before generating copies, which allows subsequent
              comparison of these copies or schemes whereby copies are
              physically joined and sequenced together. Because of
              trade-offs in cost, time and accuracy, no single method is
              optimal for every application, and each method should be
              considered on a case-by-case basis. Major applications for
              high-accuracy DNA sequencing include non-invasive cancer
              diagnostics, cancer screening, early detection of cancer
              relapse or impending drug resistance, infectious disease
              applications, prenatal diagnostics, forensics and mutagenesis
              assessment. Future advances in ultra-high-accuracy sequencing
              are likely to be driven by an emerging generation of
              single-molecule sequencers, particularly those that allow
              independent sequence comparison of both strands of native DNA
              duplexes.},
  issn = {1471-0064},
  doi = {10.1038/nrg.2017.117},
  url = {https://doi.org/10.1038/nrg.2017.117}
}

@book{book:lehninger,
  title = {Lehninger Principles of Biochemistry},
  author = {Albert Lehninger and David L. Nelson and Michael M. Cox},
  publisher = {W. H. Freeman},
  isbn = {9781429224161, 1429224169},
  year = 2008,
  edition = {5th},
  pages = 276
}

@inproceedings{crick1958protein,
  title = {On protein synthesis},
  author = {Crick, Francis H. C.},
  booktitle = {Symp Soc Exp Biol},
  volume = 12,
  pages = {138--163},
  year = 1958
}

@article{10.1093/bioinformatics/btg109,
|
|
|
|
|
author = {Lee, Christopher},
|
|
|
|
|
title = "{Generating consensus sequences from partial order multiple
|
|
|
|
|
sequence alignment graphs}",
|
|
|
|
|
journal = {Bioinformatics},
|
|
|
|
|
volume = 19,
|
|
|
|
|
number = 8,
|
|
|
|
|
pages = {999-1008},
|
|
|
|
|
year = 2003,
|
|
|
|
|
month = 05,
|
|
|
|
|
abstract = "{Motivation: Consensus sequence generation is important in
|
|
|
|
|
many kinds of sequence analysis ranging from sequence assembly
|
|
|
|
|
to profile-based iterative search methods. However, how can a
|
|
|
|
|
consensus be constructed when its inherent assumption—that the
|
|
|
|
|
aligned sequences form a single linear consensus—is not
|
|
|
|
|
true?Results: Partial Order Alignment (POA) enables
|
|
|
|
|
construction and analysis of multiple sequence alignments as
|
|
|
|
|
directed acyclic graphs containing complex branching
|
|
|
|
|
structure. Here we present a dynamic programming algorithm
|
|
|
|
|
(heaviest\_bundle) for generating multiple consensus sequences
|
|
|
|
|
from such complex alignments. The number and relationships of
|
|
|
|
|
these consensus sequences reveals the degree of structural
|
|
|
|
|
complexity of the source alignment. This is a powerful and
|
|
|
|
|
general approach for analyzing and visualizing complex
|
|
|
|
|
alignment structures, and can be applied to any alignment. We
|
|
|
|
|
illustrate its value for analyzing expressed sequence
|
|
|
|
|
alignments to detect alternative splicing, reconstruct full
|
|
|
|
|
length mRNA isoform sequences from EST fragments, and separate
|
|
|
|
|
paralog mixtures that can cause incorrect SNP
|
|
|
|
|
predictions.Availability: The heaviest\_bundle source code is
|
|
|
|
|
available at http://www.bioinformatics.ucla.edu/poaContact:
|
|
|
|
|
leec@mbi.ucla.edu*To whom correspondence should be
|
|
|
|
|
addressed.}",
|
|
|
|
|
issn = {1367-4803},
|
|
|
|
|
doi = {10.1093/bioinformatics/btg109},
|
|
|
|
|
url = {https://doi.org/10.1093/bioinformatics/btg109},
|
|
|
|
|
eprint = {https://academic.oup.com/bioinformatics/article-pdf/19/8/999/642375/btg109.pdf},
|
|
|
|
|
}
|
2021-06-30 01:45:45 +02:00
|
|
|
|
|
|
|
|
|
@Article{Nagar2013,
  author = {Nagar, Anurag and Hahsler, Michael},
  title = {Fast discovery and visualization of conserved regions in
           DNA sequences using quasi-alignment},
  journal = {BMC Bioinformatics},
  year = 2013,
  month = {Sep},
  day = 13,
  volume = 14,
  number = 11,
  pages = {S2},
  abstract = {Next Generation Sequencing techniques are producing
              enormous amounts of biological sequence data and analysis
              becomes a major computational problem. Currently, most
              analysis, especially the identification of conserved regions,
              relies heavily on Multiple Sequence Alignment and its various
              heuristics such as progressive alignment, whose run time grows
              with the square of the number and the length of the aligned
              sequences and requires significant computational resources. In
              this work, we present a method to efficiently discover regions
              of high similarity across multiple sequences without
              performing expensive sequence alignment. The method is based
              on approximating edit distance between segments of sequences
              using p-mer frequency counts. Then, efficient high-throughput
              data stream clustering is used to group highly similar
              segments into so called quasi-alignments. Quasi-alignments
              have numerous applications such as identifying species and
              their taxonomic class from sequences, comparing sequences for
              similarities, and, as in this paper, discovering conserved
              regions across related sequences.},
  issn = {1471-2105},
  doi = {10.1186/1471-2105-14-S11-S2},
  url = {https://doi.org/10.1186/1471-2105-14-S11-S2}
}

@book{book:771224,
  title = {Artificial Intelligence: A Modern Approach},
  author = {Stuart Russell and Peter Norvig},
  publisher = {Prentice Hall},
  isbn = {0136042597, 9780136042594},
  year = 2010,
  series = {Prentice Hall Series in Artificial Intelligence},
  edition = {3rd},
  pages = {38-45, 48-49, 55-56}
}

@article{McCarthy_Minsky_Rochester_Shannon_2006,
  title = {A Proposal for the Dartmouth Summer Research Project on
           Artificial Intelligence, August 31, 1955},
  volume = 27,
  url = {https://ojs.aaai.org/index.php/aimagazine/article/view/1904},
  DOI = {10.1609/aimag.v27i4.1904},
  abstract = {The 1956 Dartmouth summer research project on artificial
              intelligence was initiated by this August 31, 1955 proposal,
              authored by John McCarthy, Marvin Minsky, Nathaniel Rochester,
              and Claude Shannon. The original typescript consisted of 17
              pages plus a title page. Copies of the typescript are housed
              in the archives at Dartmouth College and Stanford University.
              The first 5 papers state the proposal, and the remaining pages
              give qualifications and interests of the four who proposed the
              study. In the interest of brevity, this article reproduces
              only the proposal itself, along with the short
              autobiographical statements of the proposers.},
  number = 4,
  journal = {AI Magazine},
  author = {McCarthy, John and Minsky, Marvin L. and Rochester,
            Nathaniel and Shannon, Claude E.},
  year = 2006,
  month = {Dec},
  pages = 12
}

@book{book:80129,
  title = {Computational Intelligence. An Introduction},
  author = {Andries P. Engelbrecht},
  publisher = {Wiley},
  isbn = {9780470035610, 0470035617},
  year = 2007,
  edition = 2,
  pages = {39-40}
}

@Inbook{Zou2009,
  author = "Zou, Jinming and Han, Yi and So, Sung-Sau",
  editor = "Livingstone, David J.",
  title = "Overview of Artificial Neural Networks",
  bookTitle = "Artificial Neural Networks: Methods and Applications",
  year = 2009,
  publisher = "Humana Press",
  address = "Totowa, NJ",
  pages = "14--22",
  abstract = "The artificial neural network (ANN), or simply neural
              network, is a machine learning method evolved from the idea of
              simulating the human brain. The data explosion in modern drug
              discovery research requires sophisticated analysis methods to
              uncover the hidden causal relationships between single or
              multiple responses and a large set of properties. The ANN is
              one of many versatile tools to meet the demand in drug
              discovery modeling. Compared to a traditional regression
              approach, the ANN is capable of modeling complex nonlinear
              relationships. The ANN also has excellent fault tolerance and
              is fast and highly scalable with parallel processing. This
              chapter introduces the background of ANN development and
              outlines the basic concepts crucially important for
              understanding more sophisticated ANN. Several commonly used
              learning methods and network setups are discussed briefly at
              the end of the chapter.",
  isbn = "978-1-60327-101-1",
  doi = "10.1007/978-1-60327-101-1_2",
  url = "https://doi.org/10.1007/978-1-60327-101-1_2"
}

@book{book:2610592,
  title = {Principles of artificial neural networks},
  author = {Graupe, Daniel},
  publisher = {World Scientific},
  isbn = {9789814522731, 9814522732},
  year = 2013,
  edition = {3rd},
  pages = {28-31}
}

@Article{Cireşan2010,
  author = {Cire{\c{s}}an, Dan Claudiu and Meier, Ueli and Gambardella,
            Luca Maria and Schmidhuber, J{\"u}rgen},
  title = {Deep, Big, Simple Neural Nets for Handwritten Digit
           Recognition},
  journal = {Neural Computation},
  year = 2010,
  month = {Dec},
  day = 01,
  volume = 22,
  number = 12,
  pages = {3207--3220},
  abstract = {Good old online backpropagation for plain multilayer
              perceptrons yields a very low 0.35{\%} error rate on the MNIST
              handwritten digits benchmark. All we need to achieve this best
              result so far are many hidden layers, many neurons per layer,
              numerous deformed training images to avoid overfitting, and
              graphics cards to greatly speed up learning.},
  issn = {0899-7667},
  doi = {10.1162/NECO_a_00052},
  url = {https://doi.org/10.1162/NECO_a_00052}
}

@Article{Rumelhart1986,
  author = {Rumelhart, David E. and Hinton, Geoffrey E. and Williams,
            Ronald J.},
  title = {Learning representations by back-propagating errors},
  journal = {Nature},
  year = 1986,
  month = {Oct},
  day = 01,
  volume = 323,
  number = 6088,
  pages = {533--536},
  abstract = {We describe a new learning procedure, back-propagation, for
              networks of neurone-like units. The procedure repeatedly
              adjusts the weights of the connections in the network so as to
              minimize a measure of the difference between the actual output
              vector of the net and the desired output vector. As a result
              of the weight adjustments, internal `hidden' units which are
              not part of the input or output come to represent important
              features of the task domain, and the regularities in the task
              are captured by the interactions of these units. The ability
              to create useful new features distinguishes back-propagation
              from earlier, simpler methods such as the
              perceptron-convergence procedure.},
  issn = {1476-4687},
  doi = {10.1038/323533a0},
  url = {https://doi.org/10.1038/323533a0}
}

@book{book:2530718,
  title = {Machine Learning Refined: Foundations, Algorithms, and
           Applications},
  author = {Jeremy Watt and Reza Borhani and Aggelos K. Katsaggelos},
  publisher = {Cambridge University Press},
  isbn = {1108480721, 9781108480727},
  year = 2020,
  edition = 2
}

@article{ruder2016overview,
  title = {An overview of gradient descent optimization algorithms},
  author = {Ruder, Sebastian},
  journal = {arXiv preprint arXiv:1609.04747},
  year = 2016
}

@article{DBLP:journals/corr/WangRX17,
  author = {Haohan Wang and Bhiksha Raj and Eric P. Xing},
  title = {On the Origin of Deep Learning},
  journal = {CoRR},
  volume = {abs/1702.07800},
  year = 2017,
  url = {http://arxiv.org/abs/1702.07800},
  archivePrefix = {arXiv},
  eprint = {1702.07800},
  timestamp = {Mon, 13 Aug 2018 16:46:19 +0200},
  biburl = {https://dblp.org/rec/journals/corr/WangRX17.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}

@Inbook{Can2014,
  author = "Can, Tolga",
  editor = "Yousef, Malik and Allmer, Jens",
  title = "Introduction to Bioinformatics",
  bookTitle = "miRNomics: MicroRNA Biology and Computational Analysis",
  year = 2014,
  publisher = "Humana Press",
  address = "Totowa, NJ",
  pages = "51--71",
  abstract = "Bioinformatics is an interdisciplinary field mainly
              involving molecular biology and genetics, computer science,
              mathematics, and statistics. Data intensive, large-scale
              biological problems are addressed from a computational point
              of view. The most common problems are modeling biological
              processes at the molecular level and making inferences from
              collected data. A bioinformatics solution usually involves the
              following steps: collect statistics from biological data;
              build a computational model; solve a computational modeling
              problem; test and evaluate a computational algorithm.",
  isbn = "978-1-62703-748-8",
  doi = "10.1007/978-1-62703-748-8_4",
  url = "https://doi.org/10.1007/978-1-62703-748-8_4"
}

@Article{Hagen2000,
  author = {Hagen, Joel B.},
  title = {The origins of bioinformatics},
  journal = {Nature Reviews Genetics},
  year = 2000,
  month = {Dec},
  day = 01,
  volume = 1,
  number = 3,
  pages = {231--236},
  abstract = {Bioinformatics is often described as being in its infancy,
              but computers emerged as important tools in molecular biology
              during the early 1960s. A decade before DNA sequencing became
              feasible, computational biologists focused on the rapidly
              accumulating data from protein biochemistry. Without the
              benefits of supercomputers or computer networks, these
              scientists laid important conceptual and technical foundations
              for bioinformatics today.},
  issn = {1471-0064},
  doi = {10.1038/35042090},
  url = {https://doi.org/10.1038/35042090}
}

@article{doi:10.1146/annurev-genom-090413-025358,
  author = {Reinert, Knut and Langmead, Ben and Weese, David and Evers,
            Dirk J.},
  title = {Alignment of Next-Generation Sequencing Reads},
  journal = {Annual Review of Genomics and Human Genetics},
  volume = 16,
  number = 1,
  pages = {133--151},
  year = 2015,
  doi = {10.1146/annurev-genom-090413-025358},
  note = {PMID: 25939052},
  URL = {https://doi.org/10.1146/annurev-genom-090413-025358},
  eprint = {https://doi.org/10.1146/annurev-genom-090413-025358},
  abstract = {High-throughput DNA sequencing has considerably changed
              the possibilities for conducting biomedical research by
              measuring billions of short DNA or RNA fragments. A central
              computational problem, and for many applications a first step,
              consists of determining where the fragments came from in the
              original genome. In this article, we review the main
              techniques for generating the fragments, the main
              applications, and the main algorithmic ideas for computing a
              solution to the read alignment problem. In addition, we
              describe pitfalls and difficulties connected to determining
              the correct positions of reads.}
}

@book{book:211898,
  title = {Marks' Basic Medical Biochemistry: A Clinical Approach},
  author = {Michael A. Lieberman and Allan Marks},
  publisher = {Lippincott Williams \& Wilkins},
  isbn = {9780781770224, 078177022X},
  year = 2008,
  series = {Point Lippincott Williams \& Wilkins},
  edition = {Third},
  pages = {209, 260}
}

@article{ABIODUN2018e00938,
  title = {State-of-the-art in artificial neural network applications:
           A survey},
  journal = {Heliyon},
  volume = 4,
  number = 11,
  pages = {e00938},
  year = 2018,
  issn = {2405-8440},
  doi = {10.1016/j.heliyon.2018.e00938},
  url = {https://www.sciencedirect.com/science/article/pii/S2405844018332067},
  author = {Oludare Isaac Abiodun and Aman Jantan and Abiodun Esther
            Omolara and Kemi Victoria Dada and Nachaat AbdElatif Mohamed
            and Humaira Arshad},
  keywords = {Computer science},
  abstract = {This is a survey of neural network applications in the
              real-world scenario. It provides a taxonomy of artificial
              neural networks (ANNs) and furnish the reader with knowledge
              of current and emerging trends in ANN applications research
              and area of focus for researchers. Additionally, the study
              presents ANN application challenges, contributions, compare
              performances and critiques methods. The study covers many
              applications of ANN techniques in various disciplines which
              include computing, science, engineering, medicine,
              environmental, agriculture, mining, technology, climate,
              business, arts, and nanotechnology, etc. The study assesses
              ANN contributions, compare performances and critiques methods.
              The study found that neural-network models such as feedforward
              and feedback propagation artificial neural networks are
              performing better in its application to human problems.
              Therefore, we proposed feedforward and feedback propagation
              ANN models for research focus based on data analysis factors
              like accuracy, processing speed, latency, fault tolerance,
              volume, scalability, convergence, and performance. Moreover,
              we recommend that instead of applying a single method, future
              research can focus on combining ANN models into one
              network-wide application.}
}

@article{LIU201711,
  title = {A survey of deep neural network architectures and their
           applications},
  journal = {Neurocomputing},
  volume = 234,
  pages = {11--26},
  year = 2017,
  issn = {0925-2312},
  doi = {10.1016/j.neucom.2016.12.038},
  url = {https://www.sciencedirect.com/science/article/pii/S0925231216315533},
  author = {Weibo Liu and Zidong Wang and Xiaohui Liu and Nianyin Zeng
            and Yurong Liu and Fuad E. Alsaadi},
  keywords = {Autoencoder, Convolutional neural network, Deep learning,
              Deep belief network, Restricted Boltzmann machine},
  abstract = {Since the proposal of a fast learning algorithm for deep
              belief networks in 2006, the deep learning techniques have
              drawn ever-increasing research interests because of their
              inherent capability of overcoming the drawback of traditional
              algorithms dependent on hand-designed features. Deep learning
              approaches have also been found to be suitable for big data
              analysis with successful applications to computer vision,
              pattern recognition, speech recognition, natural language
              processing, and recommendation systems. In this paper, we
              discuss some widely-used deep learning architectures and their
              practical applications. An up-to-date overview is provided on
              four deep learning architectures, namely, autoencoder,
              convolutional neural network, deep belief network, and
              restricted Boltzmann machine. Different types of deep neural
              networks are surveyed and recent progresses are summarized.
              Applications of deep learning techniques on some selected
              areas (speech recognition, pattern recognition and computer
              vision) are highlighted. A list of future research topics are
              finally given with clear justifications.}
}

@misc{chervinskii_2015,
  title = {Autoencoder structure},
  url = {https://commons.wikimedia.org/wiki/File:Autoencoder_structure.png},
  howpublished = {Wikimedia Commons},
  author = {Chervinskii},
  year = 2015,
  month = {Dec}
}

@book{Goodfellow-et-al-2016,
  title = {Deep Learning},
  author = {Ian Goodfellow and Yoshua Bengio and Aaron Courville},
  publisher = {MIT Press},
  note = {\url{http://www.deeplearningbook.org}},
  year = 2016
}

@Article{Lewis_2020,
  author = {Lewis, Mike and Liu, Yinhan and Goyal, Naman and
            Ghazvininejad, Marjan and Mohamed, Abdelrahman and Levy, Omer
            and Stoyanov, Veselin and Zettlemoyer, Luke},
  title = {BART: Denoising Sequence-to-Sequence Pre-training for
           Natural Language Generation, Translation, and Comprehension},
  journal = {Proceedings of the 58th Annual Meeting of the Association
             for Computational Linguistics},
  year = 2020,
  doi = {10.18653/v1/2020.acl-main.703},
  url = {http://dx.doi.org/10.18653/v1/2020.acl-main.703},
  publisher = {Association for Computational Linguistics}
}

@article{bigdeli17_image_restor_using_autoen_prior,
  author = {Bigdeli, Siavash Arjomand and Zwicker, Matthias},
  title = {Image Restoration Using Autoencoding Priors},
  journal = {CoRR},
  year = 2017,
  url = {http://arxiv.org/abs/1703.09964v1},
  abstract = {We propose to leverage denoising autoencoder networks as
              priors to address image restoration problems. We build on the
              key observation that the output of an optimal denoising
              autoencoder is a local mean of the true data density, and the
              autoencoder error (the difference between the output and input
              of the trained autoencoder) is a mean shift vector. We use the
              magnitude of this mean shift vector, that is, the distance to
              the local mean, as the negative log likelihood of our natural
              image prior. For image restoration, we maximize the likelihood
              using gradient descent by backpropagating the autoencoder
              error. A key advantage of our approach is that we do not need
              to train separate networks for different image restoration
              tasks, such as non-blind deconvolution with different kernels,
              or super-resolution at different magnification factors. We
              demonstrate state of the art results for non-blind
              deconvolution and super-resolution using the same autoencoding
              prior.},
  archivePrefix = {arXiv},
  eprint = {1703.09964},
  primaryClass = {cs.CV}
}

@article{makhzani15_adver_autoen,
  author = {Makhzani, Alireza and Shlens, Jonathon and Jaitly, Navdeep
            and Goodfellow, Ian and Frey, Brendan},
  title = {Adversarial Autoencoders},
  journal = {CoRR},
  year = 2015,
  url = {http://arxiv.org/abs/1511.05644v2},
  abstract = {In this paper, we propose the "adversarial autoencoder"
              (AAE), which is a probabilistic autoencoder that uses the
              recently proposed generative adversarial networks (GAN) to
              perform variational inference by matching the aggregated
              posterior of the hidden code vector of the autoencoder with an
              arbitrary prior distribution. Matching the aggregated
              posterior to the prior ensures that generating from any part
              of prior space results in meaningful samples. As a result, the
              decoder of the adversarial autoencoder learns a deep
              generative model that maps the imposed prior to the data
              distribution. We show how the adversarial autoencoder can be
              used in applications such as semi-supervised classification,
              disentangling style and content of images, unsupervised
              clustering, dimensionality reduction and data visualization.
              We performed experiments on MNIST, Street View House Numbers
              and Toronto Face datasets and show that adversarial
              autoencoders achieve competitive results in generative
              modeling and semi-supervised classification tasks.},
  archivePrefix = {arXiv},
  eprint = {1511.05644v2},
  primaryClass = {cs.LG}
}

@Article{Yoo_2020,
  author = {Yoo, Jaeyoung and Lee, Hojun and Kwak, Nojun},
  title = {Unpriortized Autoencoder For Image Generation},
  journal = {2020 IEEE International Conference on Image Processing
             (ICIP)},
  year = 2020,
  month = {Oct},
  doi = {10.1109/icip40778.2020.9191173},
  url = {http://dx.doi.org/10.1109/ICIP40778.2020.9191173},
  ISBN = {9781728163956},
  publisher = {IEEE}
}

@article{kaiser18_discr_autoen_sequen_model,
  author = {Kaiser, Łukasz and Bengio, Samy},
  title = {Discrete Autoencoders for Sequence Models},
  journal = {CoRR},
  year = 2018,
  url = {http://arxiv.org/abs/1801.09797v1},
  abstract = {Recurrent models for sequences have been recently
              successful at many tasks, especially for language modeling and
              machine translation. Nevertheless, it remains challenging to
              extract good representations from these models. For instance,
              even though language has a clear hierarchical structure going
              from characters through words to sentences, it is not apparent
              in current language models. We propose to improve the
              representation in sequence models by augmenting current
              approaches with an autoencoder that is forced to compress the
              sequence through an intermediate discrete latent space. In
              order to propagate gradients through this discrete
              representation we introduce an improved semantic hashing
              technique. We show that this technique performs well on a
              newly proposed quantitative efficiency measure. We also
              analyze latent codes produced by the model showing how they
              correspond to words and phrases. Finally, we present an
              application of the autoencoder-augmented model to generating
              diverse translations.},
  archivePrefix = {arXiv},
  eprint = {1801.09797v1},
  primaryClass = {cs.LG}
}

@misc{brownlee_2020,
  title = {How Do Convolutional Layers Work in Deep Learning Neural
           Networks?},
  url = {https://machinelearningmastery.com/convolutional-layers-for-deep-learning-neural-networks/},
  howpublished = {Machine Learning Mastery},
  author = {Brownlee, Jason},
  year = 2020,
  month = {Apr}
}

@article{howard17_mobil,
  author = {Howard, Andrew G. and Zhu, Menglong and Chen, Bo and
            Kalenichenko, Dmitry and Wang, Weijun and Weyand, Tobias and
            Andreetto, Marco and Adam, Hartwig},
  title = {{MobileNets}: Efficient Convolutional Neural Networks for
           Mobile Vision Applications},
  journal = {CoRR},
  year = 2017,
  url = {http://arxiv.org/abs/1704.04861v1},
  abstract = {We present a class of efficient models called MobileNets
              for mobile and embedded vision applications. MobileNets are
              based on a streamlined architecture that uses depth-wise
              separable convolutions to build light weight deep neural
              networks. We introduce two simple global hyper-parameters that
              efficiently trade off between latency and accuracy. These
              hyper-parameters allow the model builder to choose the right
              sized model for their application based on the constraints of
              the problem. We present extensive experiments on resource and
              accuracy tradeoffs and show strong performance compared to
              other popular models on ImageNet classification. We then
              demonstrate the effectiveness of MobileNets across a wide
              range of applications and use cases including object
              detection, finegrain classification, face attributes and large
              scale geo-localization.},
  archivePrefix = {arXiv},
  eprint = {1704.04861v1},
  primaryClass = {cs.CV}
}

@article{ronneberger15_u_net,
  author = {Ronneberger, Olaf and Fischer, Philipp and Brox, Thomas},
  title = {U-Net: Convolutional Networks for Biomedical Image
           Segmentation},
  journal = {CoRR},
  year = 2015,
  url = {http://arxiv.org/abs/1505.04597v1},
  abstract = {There is large consent that successful training of deep
              networks requires many thousand annotated training samples. In
              this paper, we present a network and training strategy that
              relies on the strong use of data augmentation to use the
              available annotated samples more efficiently. The architecture
              consists of a contracting path to capture context and a
              symmetric expanding path that enables precise localization. We
              show that such a network can be trained end-to-end from very
              few images and outperforms the prior best method (a
              sliding-window convolutional network) on the ISBI challenge
              for segmentation of neuronal structures in electron
              microscopic stacks. Using the same network trained on
              transmitted light microscopy images (phase contrast and DIC)
              we won the ISBI cell tracking challenge 2015 in these
              categories by a large margin. Moreover, the network is fast.
              Segmentation of a 512x512 image takes less than a second on a
              recent GPU. The full implementation (based on Caffe) and the
              trained networks are available at
              http://lmb.informatik.uni-freiburg.de/people/ronneber/u-net.},
  archivePrefix = {arXiv},
  eprint = {1505.04597v1},
  primaryClass = {cs.CV}
}

@article{yuan18_simpl_convol_gener_networ_next_item_recom,
  author = {Yuan, Fajie and Karatzoglou, Alexandros and Arapakis,
            Ioannis and Jose, Joemon M and He, Xiangnan},
  title = {A Simple Convolutional Generative Network for Next Item
           Recommendation},
  journal = {CoRR},
  year = 2018,
  url = {http://arxiv.org/abs/1808.05163v4},
  abstract = {Convolutional Neural Networks (CNNs) have been recently
              introduced in the domain of session-based next item
              recommendation. An ordered collection of past items the user
              has interacted with in a session (or sequence) are embedded
              into a 2-dimensional latent matrix, and treated as an image.
              The convolution and pooling operations are then applied to the
              mapped item embeddings. In this paper, we first examine the
              typical session-based CNN recommender and show that both the
              generative model and network architecture are suboptimal when
              modeling long-range dependencies in the item sequence. To
              address the issues, we introduce a simple, but very effective
              generative model that is capable of learning high-level
              representation from both short- and long-range item
              dependencies. The network architecture of the proposed model
              is formed of a stack of \emph{holed} convolutional layers,
              which can efficiently increase the receptive fields without
              relying on the pooling operation. Another contribution is the
              effective use of residual block structure in recommender
              systems, which can ease the optimization for much deeper
              networks. The proposed generative model attains
              state-of-the-art accuracy with less training time in the next
              item recommendation task. It accordingly can be used as a
              powerful recommendation baseline to beat in future, especially
              when there are long sequences of user feedback.},
  archivePrefix = {arXiv},
  eprint = {1808.05163v4},
  primaryClass = {cs.IR}
}

@article{sadr21_novel_deep_learn_method_textual_sentim_analy,
  author = {Sadr, Hossein and Solimandarabi, Mozhdeh Nazari and Pedram,
            Mir Mohsen and Teshnehlab, Mohammad},
  title = {A Novel Deep Learning Method for Textual Sentiment Analysis},
  journal = {CoRR},
  year = 2021,
  url = {http://arxiv.org/abs/2102.11651v1},
  abstract = {Sentiment analysis is known as one of the most crucial tasks
              in the field of natural language processing and Convolutional
              Neural Network (CNN) is one of those prominent models that is
              commonly used for this aim. Although convolutional neural
              networks have obtained remarkable results in recent years,
              they are still confronted with some limitations. Firstly,
              they consider that all words in a sentence have equal
              contributions in the sentence meaning representation and are
              not able to extract informative words. Secondly, they require
              a large number of training data to obtain considerable
              results while they have many parameters that must be
              accurately adjusted. To this end, a convolutional neural
              network integrated with a hierarchical attention layer is
              proposed which is able to extract informative words and
              assign them higher weight. Moreover, the effect of transfer
              learning that transfers knowledge learned in the source
              domain to the target domain with the aim of improving the
              performance is also explored. Based on the empirical results,
              the proposed model not only has higher classification
              accuracy and can extract informative words but also applying
              incremental transfer learning can significantly enhance the
              classification performance.},
  archivePrefix = {arXiv},
  eprint = {2102.11651},
  primaryClass = {cs.CL},
}

@book{book:930,
  title = {Bioinformatics: the machine learning approach},
  author = {Pierre Baldi and Søren Brunak},
  publisher = {The MIT Press},
  isbn = {026202506X,9780585444666,9780262025065},
  year = 2001,
  series = {Adaptive Computation and Machine Learning},
  edition = 2,
  pages = 12,
}

@Article{Schneider_2011,
  author = {Schneider, Maria V. and Orchard, Sandra},
  title = {Omics Technologies, Data and Bioinformatics Principles},
  journal = {Bioinformatics for Omics Data},
  year = 2011,
  pages = {3--30},
  issn = {1940-6029},
  doi = {10.1007/978-1-61779-027-0_1},
  url = {http://dx.doi.org/10.1007/978-1-61779-027-0_1},
  isbn = {9781617790270},
  publisher = {Humana Press},
}

@Article{Peri_2020,
  author = {Peri, Sateesh and Roberts, Sarah and Kreko, Isabella R. and
            McHan, Lauren B. and Naron, Alexandra and Ram, Archana and
            Murphy, Rebecca L. and Lyons, Eric and Gregory, Brian D. and
            Devisetty, Upendra K. and others},
  title = {Read Mapping and Transcript Assembly: A Scalable and
           High-Throughput Workflow for the Processing and Analysis of
           Ribonucleic Acid Sequencing Data},
  journal = {Frontiers in Genetics},
  year = 2020,
  volume = 10,
  month = {Jan},
  issn = {1664-8021},
  doi = {10.3389/fgene.2019.01361},
  url = {http://dx.doi.org/10.3389/fgene.2019.01361},
  publisher = {Frontiers Media SA}
}

@Article{Zerbino_2008,
  author = {Zerbino, D. R. and Birney, E.},
  title = {Velvet: Algorithms for de novo short read assembly using de
           Bruijn graphs},
  journal = {Genome Research},
  year = 2008,
  volume = 18,
  number = 5,
  month = {Feb},
  pages = {821--829},
  issn = {1088-9051},
  doi = {10.1101/gr.074492.107},
  url = {http://dx.doi.org/10.1101/gr.074492.107},
  publisher = {Cold Spring Harbor Laboratory}
}

@Article{Spudich_2007,
  author = {Spudich, G. and Fernandez-Suarez, X. M. and Birney, E.},
  title = {Genome browsing with Ensembl: a practical overview},
  journal = {Briefings in Functional Genomics and Proteomics},
  year = 2007,
  volume = 6,
  number = 3,
  month = {Aug},
  pages = {202--219},
  issn = {1477-4062},
  doi = {10.1093/bfgp/elm025},
  url = {http://dx.doi.org/10.1093/bfgp/elm025},
  publisher = {Oxford University Press (OUP)}
}

@Article{Liu_2018,
  author = {Liu, Yang and Ye, Qing and Wang, Liwei and Peng, Jian},
  title = {Learning structural motif representations for efficient
           protein structure search},
  journal = {Bioinformatics},
  year = 2018,
  volume = 34,
  number = 17,
  month = {Sep},
  pages = {i773--i780},
  issn = {1460-2059},
  doi = {10.1093/bioinformatics/bty585},
  url = {http://dx.doi.org/10.1093/bioinformatics/bty585},
  publisher = {Oxford University Press (OUP)}
}

@Article{Salmela_2011,
  author = {Salmela, L. and Schroder, J.},
  title = {Correcting errors in short reads by multiple alignments},
  journal = {Bioinformatics},
  year = 2011,
  volume = 27,
  number = 11,
  month = {Apr},
  pages = {1455--1461},
  issn = {1460-2059},
  doi = {10.1093/bioinformatics/btr170},
  url = {http://dx.doi.org/10.1093/bioinformatics/btr170},
  publisher = {Oxford University Press (OUP)}
}

@Article{Yang_2012,
  author = {Yang, X. and Chockalingam, S. P. and Aluru, S.},
  title = {A survey of error-correction methods for next-generation
           sequencing},
  journal = {Briefings in Bioinformatics},
  year = 2012,
  volume = 14,
  number = 1,
  month = {Apr},
  pages = {56--66},
  issn = {1477-4054},
  doi = {10.1093/bib/bbs015},
  url = {http://dx.doi.org/10.1093/bib/bbs015},
  publisher = {Oxford University Press (OUP)}
}

@Article{Kelley_2010,
  author = {Kelley, David R and Schatz, Michael C and Salzberg, Steven L},
  title = {Quake: quality-aware detection and correction of sequencing
           errors},
  journal = {Genome Biology},
  year = 2010,
  volume = 11,
  number = 11,
  pages = {R116},
  issn = {1465-6906},
  doi = {10.1186/gb-2010-11-11-r116},
  url = {http://dx.doi.org/10.1186/gb-2010-11-11-r116},
  publisher = {Springer Science and Business Media LLC}
}

@Article{Zhao_2017,
  author = {Zhao, Liang and Chen, Qingfeng and Li, Wencui and Jiang,
            Peng and Wong, Limsoon and Li, Jinyan},
  title = {MapReduce for accurate error correction of next-generation
           sequencing data},
  journal = {Bioinformatics},
  year = 2017,
  editor = {Sahinalp, Cenk},
  volume = 33,
  number = 23,
  month = {Feb},
  pages = {3844--3851},
  issn = {1460-2059},
  doi = {10.1093/bioinformatics/btx089},
  url = {http://dx.doi.org/10.1093/bioinformatics/btx089},
  publisher = {Oxford University Press (OUP)}
}

@inproceedings{inproceedings,
  author = {Dolstra, Eelco and de Jonge, Merijn and Visser, Eelco},
  title = {Nix: A Safe and Policy-Free System for Software Deployment},
  booktitle = {Proceedings of the 18th Large Installation System
               Administration Conference (LISA '04)},
  year = 2004,
  pages = {79--92}
}

@Article{Caboche_2014,
  author = {Caboche, Ségolène and Audebert, Christophe and Lemoine,
            Yves and Hot, David},
  title = {Comparison of mapping algorithms used in high-throughput
           sequencing: application to Ion Torrent data},
  journal = {BMC Genomics},
  year = 2014,
  volume = 15,
  number = 1,
  pages = 264,
  issn = {1471-2164},
  doi = {10.1186/1471-2164-15-264},
  url = {http://dx.doi.org/10.1186/1471-2164-15-264},
  publisher = {Springer Science and Business Media LLC}
}

@Article{Weber_2020,
  author = {Weber, Cédric R and Akbar, Rahmad and Yermanos, Alexander
            and Pavlović, Milena and Snapkov, Igor and Sandve, Geir K and
            Reddy, Sai T and Greiff, Victor},
  title = {immuneSIM: tunable multi-feature simulation of B- and T-cell
           receptor repertoires for immunoinformatics benchmarking},
  journal = {Bioinformatics},
  year = 2020,
  editor = {Schwartz, Russell},
  volume = 36,
  number = 11,
  month = {Apr},
  pages = {3594--3596},
  issn = {1460-2059},
  doi = {10.1093/bioinformatics/btaa158},
  url = {http://dx.doi.org/10.1093/bioinformatics/btaa158},
  publisher = {Oxford University Press (OUP)}
}

@Article{Cock_2009,
  author = {Cock, P. J. A. and Antao, T. and Chang, J. T. and Chapman,
            B. A. and Cox, C. J. and Dalke, A. and Friedberg, I. and
            Hamelryck, T. and Kauff, F. and Wilczynski, B. and others},
  title = {Biopython: freely available Python tools for computational
           molecular biology and bioinformatics},
  journal = {Bioinformatics},
  year = 2009,
  volume = 25,
  number = 11,
  month = {Mar},
  pages = {1422--1423},
  issn = {1460-2059},
  doi = {10.1093/bioinformatics/btp163},
  url = {http://dx.doi.org/10.1093/bioinformatics/btp163},
  publisher = {Oxford University Press (OUP)}
}

@misc{tensorflow2015-whitepaper,
  title = {{TensorFlow}: Large-Scale Machine Learning on Heterogeneous
           Systems},
  url = {https://www.tensorflow.org/},
  note = {Software available from tensorflow.org},
  author = {Mart\'{\i}n~Abadi and Ashish~Agarwal and Paul~Barham and
            Eugene~Brevdo and Zhifeng~Chen and Craig~Citro and
            Greg~S.~Corrado and Andy~Davis and Jeffrey~Dean and
            Matthieu~Devin and Sanjay~Ghemawat and Ian~Goodfellow and
            Andrew~Harp and Geoffrey~Irving and Michael~Isard and
            Yangqing Jia and Rafal~Jozefowicz and Lukasz~Kaiser and
            Manjunath~Kudlur and Josh~Levenberg and Dandelion~Man\'{e} and
            Rajat~Monga and Sherry~Moore and Derek~Murray and Chris~Olah
            and Mike~Schuster and Jonathon~Shlens and Benoit~Steiner and
            Ilya~Sutskever and Kunal~Talwar and Paul~Tucker and
            Vincent~Vanhoucke and Vijay~Vasudevan and Fernanda~Vi\'{e}gas
            and Oriol~Vinyals and Pete~Warden and Martin~Wattenberg and
            Martin~Wicke and Yuan~Yu and Xiaoqiang~Zheng},
  year = 2015,
}

@misc{Biostrings,
  title = {Biostrings: Efficient manipulation of biological strings},
  author = {H. Pagès and P. Aboyoun and R. Gentleman and S. DebRoy},
  year = 2019,
  note = {R package version 2.50.2},
}

@Article{Zhang_2003,
  author = {Zhang, Shichao and Zhang, Chengqi and Yang, Qiang},
  title = {Data preparation for data mining},
  journal = {Applied Artificial Intelligence},
  year = 2003,
  volume = 17,
  number = {5-6},
  month = {May},
  pages = {375--381},
  issn = {1087-6545},
  doi = {10.1080/713827180},
  url = {http://dx.doi.org/10.1080/713827180},
  publisher = {Informa UK Limited}
}

@Article{Lopez_Moreno_2016,
  author = {Lopez-Moreno, Ignacio and Gonzalez-Dominguez, Javier and
            Martinez, David and Plchot, Oldřich and Gonzalez-Rodriguez,
            Joaquin and Moreno, Pedro J.},
  title = {On the use of deep feedforward neural networks for automatic
           language identification},
  journal = {Computer Speech \& Language},
  year = 2016,
  volume = 40,
  month = {Nov},
  pages = {46--59},
  issn = {0885-2308},
  doi = {10.1016/j.csl.2016.03.001},
  url = {http://dx.doi.org/10.1016/j.csl.2016.03.001},
  publisher = {Elsevier BV}
}

@Article{Chakraborty_2020,
  author = {Chakraborty, Sourav and Choudhary, Arun Kumar and Sarma,
            Mausumi and Hazarika, Manuj Kumar},
  title = {Reaction order and neural network approaches for the
           simulation of COVID-19 spreading kinetic in India},
  journal = {Infectious Disease Modelling},
  year = 2020,
  volume = 5,
  pages = {737--747},
  issn = {2468-0427},
  doi = {10.1016/j.idm.2020.09.002},
  url = {http://dx.doi.org/10.1016/j.idm.2020.09.002},
  publisher = {Elsevier BV}
}

@Article{Mansoor_2021,
  author = {Mansoor, Muhammad and Grimaccia, Francesco and Leva, Sonia
            and Mussetta, Marco},
  title = {Comparison of echo state network and feed-forward neural
           networks in electrical load forecasting for demand response
           programs},
  journal = {Mathematics and Computers in Simulation},
  year = 2021,
  volume = 184,
  month = {Jun},
  pages = {282--293},
  issn = {0378-4754},
  doi = {10.1016/j.matcom.2020.07.011},
  url = {http://dx.doi.org/10.1016/j.matcom.2020.07.011},
  publisher = {Elsevier BV}
}

@Article{Baker_2016,
  author = {Baker, Monya},
  title = {1,500 scientists lift the lid on reproducibility},
  journal = {Nature},
  year = 2016,
  volume = 533,
  number = 7604,
  month = {May},
  pages = {452--454},
  issn = {1476-4687},
  doi = {10.1038/533452a},
  url = {http://dx.doi.org/10.1038/533452a},
  publisher = {Springer Science and Business Media LLC}
}

@MISC{Stodden13publishingstandards,
  author = {Victoria Stodden and Jonathan Borwein and David H. Bailey},
  title = {Publishing Standards for Computational Science: ``Setting the
           Default to Reproducible''},
  year = 2013
}