From f9aabb10994a83cc3fc0451a41c5a00da9ff7668 Mon Sep 17 00:00:00 2001
From: "jinghuazhao@github.com"
Date: Tue, 10 Dec 2024 09:40:22 +0000
Subject: [PATCH] Deployed bb21f4f with MkDocs version: 1.5.3

---
 Computing/index.html     | 8 +++++++-
 index.html               | 2 +-
 search/search_index.json | 2 +-
 sitemap.xml.gz           | Bin 127 -> 127 bytes
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/Computing/index.html b/Computing/index.html
index 426007c..35c4bd8 100644
--- a/Computing/index.html
+++ b/Computing/index.html
@@ -277,7 +277,13 @@

Online documentation/software/utilities
  • PhotoScissors, https://photoscissors.com/
  • QGIS, https://www.qgis.org/
  • Quarto, https://quarto.org/
-  • RStudio, https://www.rstudio.com/ download (Mastering Shiny, shinyapps)
+  • RStudio, https://posit.co/ (https://www.rstudio.com/)
+
  • Rufus, https://rufus.ie/en/
  • Scribus, https://sourceforge.net/projects/scribus/
  • Speedtest, https://www.speedtest.net/
diff --git a/index.html b/index.html
index 71ca0f0..a7e7f03 100644
--- a/index.html
+++ b/index.html
@@ -219,5 +219,5 @@

    diff --git a/search/search_index.json b/search/search_index.json index 0b901d1..7eac07e 100644 --- a/search/search_index.json +++ b/search/search_index.json @@ -1 +1 @@ -{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Computational Statistics Online resources for computational statistics The repository intends to make it easier to keep pace with releases of Linux systems and R, among others; select the menu items above for details. INSTALL . Installation and setup. LANGUAGES . Language-related contents. PARALLEL . Parallel computing. REPRODUCE . Reproducible research. SYSTEMS . System-related materials. WEB: AI-related sites , Computing resources , Flask , Utilities","title":""},{"location":"#computational-statistics","text":"Online resources for computational statistics The repository intends to make it easier to keep pace with releases of Linux systems and R, among others; select the menu items above for details. INSTALL . Installation and setup. LANGUAGES . Language-related contents. PARALLEL . Parallel computing. REPRODUCE . Reproducible research. SYSTEMS . System-related materials. WEB: AI-related sites , Computing resources , Flask , Utilities","title":"Computational Statistics"},{"location":"AI/","text":"AI-related sites AI Act, https://digital-strategy.ec.europa.eu/en/policies/regulatory-framework-ai Anthropic, https://www.anthropic.com/ Claude, https://claude.ai sonnet, https://www.anthropic.com/claude/sonnet GitHub, https://github.com/lm-sys LMSYS Org, https://lmsys.org/ Chatbot Arena, https://lmarena.ai/ ChatPDF, https://www.chatpdf.com/ Crayon, https://www.craiyon.com/ fast.ai, https://github.com/fastai Google, https://www.google.com/ Colab, https://colab.research.google.com/ DeepMind, https://deepmind.google/ Gemini, https://gemini.google.com/ Hugging Face, https://huggingface.co Datasets, https://huggingface.co/datasets HuggingChat, https://huggingface.co/chat/ Llama-3.3-70B-Instruct, https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct Models, https://huggingface.co/models Kimi chat, https://kimi.moonshot.cn/ (PDF processing) Meta, https://www.facebook.com/business Llama, https://www.llama.com/ ( terms ) Meta AI, https://www.meta.ai/ Nougat, https://facebookresearch.github.io/nougat/ MicroSoft Bing Image Creator, https://www.bing.com/images/create/ Midjourney, https://www.midjourney.com/home No-Code LLM App Builders Dify, https://dify.ai/ Flowwise AI, https://flowiseai.com/ LangFlow, https://www.langflow.org/ OpenAI, https://openai.com/ ChatGPT, https://chatgpt.com/ CLIP, https://openai.com/index/clip/ DALL\u00b7E 3, https://openai.com/index/dall-e-3/ GitHub, https://github.com/openai sora, https://openai.com/index/sora/ Open-source LLMs DeepEval (>> Humanloop), https://github.com/confident-ai/deepeval Llama-3.1 (>> Open AI GPT-4), https://huggingface.co/meta-llama/Llama-3.1-8B LangChain (>> OpenAI Assistants), https://github.com/langchain-ai/langchain Flowwise (>> Relevance AI), https://github.com/FlowiseAI/Flowise Lite LLM (>> Martian AI ), https://github.com/BerriAI/litellm PaddlePaddle, https://github.com/PaddlePaddle Perplexity AI, https://www.perplexity.ai/ (search) Reka, https://chat.reka.ai/ Stability AI, https://stability.ai/ Stable diffusion, https://github.com/CompVis","title":"AI-related sites"},{"location":"AI/#ai-related-sites","text":"AI Act, https://digital-strategy.ec.europa.eu/en/policies/regulatory-framework-ai Anthropic, https://www.anthropic.com/ 
Claude, https://claude.ai sonnet, https://www.anthropic.com/claude/sonnet GitHub, https://github.com/lm-sys LMSYS Org, https://lmsys.org/ Chatbot Arena, https://lmarena.ai/ ChatPDF, https://www.chatpdf.com/ Crayon, https://www.craiyon.com/ fast.ai, https://github.com/fastai Google, https://www.google.com/ Colab, https://colab.research.google.com/ DeepMind, https://deepmind.google/ Gemini, https://gemini.google.com/ Hugging Face, https://huggingface.co Datasets, https://huggingface.co/datasets HuggingChat, https://huggingface.co/chat/ Llama-3.3-70B-Instruct, https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct Models, https://huggingface.co/models Kimi chat, https://kimi.moonshot.cn/ (PDF processing) Meta, https://www.facebook.com/business Llama, https://www.llama.com/ ( terms ) Meta AI, https://www.meta.ai/ Nougat, https://facebookresearch.github.io/nougat/ MicroSoft Bing Image Creator, https://www.bing.com/images/create/ Midjourney, https://www.midjourney.com/home No-Code LLM App Builders Dify, https://dify.ai/ Flowwise AI, https://flowiseai.com/ LangFlow, https://www.langflow.org/ OpenAI, https://openai.com/ ChatGPT, https://chatgpt.com/ CLIP, https://openai.com/index/clip/ DALL\u00b7E 3, https://openai.com/index/dall-e-3/ GitHub, https://github.com/openai sora, https://openai.com/index/sora/ Open-source LLMs DeepEval (>> Humanloop), https://github.com/confident-ai/deepeval Llama-3.1 (>> Open AI GPT-4), https://huggingface.co/meta-llama/Llama-3.1-8B LangChain (>> OpenAI Assistants), https://github.com/langchain-ai/langchain Flowwise (>> Relevance AI), https://github.com/FlowiseAI/Flowise Lite LLM (>> Martian AI ), https://github.com/BerriAI/litellm PaddlePaddle, https://github.com/PaddlePaddle Perplexity AI, https://www.perplexity.ai/ (search) Reka, https://chat.reka.ai/ Stability AI, https://stability.ai/ Stable diffusion, https://github.com/CompVis","title":"AI-related sites"},{"location":"Computing/","text":"Computing resources Online publications Cambridge English Dictionary, https://dictionary.cambridge.org/dictionary/ Free literature for your device, https://www.planetebook.com/ Handbook of Statistics, https://www.sciencedirect.com/handbook/handbook-of-statistics Merriam-Webster dictionary, https://www.merriam-webster.com/ ManyBooks, https://manybooks.net/ Springer Link, https://link.springer.com/ The book of statistical proofs, https://statproofbook.github.io/ Reference management AuthorArranger, https://authorarranger.nci.nih.gov/ citavi, https://www.citavi.com/ EndNote, https://support.clarivate.com/Endnote/s/?language=en_US ( MyEndNoteWeb ) JabRef, https://www.jabref.org/ Lead2Amazon, https://lead.to/amazon/uk/ Mendeley, https://www.mendeley.com/ Papers, https://www.papersapp.com/ ProQuest, https://about.proquest.com/en/ ( https://refworks.proquest.com/ ) PubMed, https://pubmed.ncbi.nlm.nih.gov/ Qiqqa, https://github.com/jimmejardine/qiqqa-open-source Sciwheel, get started ( Google Docs add-on , MicroSoft Word add-on ) Web of science, https://www.webofscience.com/wos/woscc/basic-search Zotero, https://www.zotero.org/ ( https://zbib.org/ ) Organisations Apache, https://httpd.apache.org/ APKmirror, https://www.apkmirror.com/ ARIA, https://www.aria.org.uk/ ( https://substack.com/@ariaresearch ) CRA, https://cra.org/ CRAN, https://cran.r-project.org/ CS rankings, https://csrankings.org/ CyberChef, https://gchq.github.io/CyberChef/ Google, https://www.google.co.uk/ Adwords keywords planner, https://ads.google.com/home/tools/keyword-planner/ App Engine, 
https://cloud.google.com/appengine/ Check My Links DeepMind, https://deepmind.google/ Gemini, https://ai.google.dev/ My groups, https://groups.google.com/my-groups Search console, https://search.google.com/search-console/about Translate, https://translate.google.co.uk/ MicroSoft, https://www.microsoft.com/ Account, https://account.live.com/consent/Manage?uaid=00be9fb584174427b1cffb995d934b24 Bing webmaster tool, https://www.bing.com/webmasters/about Bing image creator, https://www.bing.com/images/create/ dotnet, https://dotnet.microsoft.com/en-us/ Forms GatesNotes, https://www.gatesnotes.com/ Hotmail, https://outlook.live.com/owa/ LINQpad, https://www.linqpad.net/ Support, https://support.microsoft.com/en-us Sway, https://sway.office.com/ Windows 11 virtual machines, https://developer.microsoft.com/en-us/windows/downloads/virtual-machines/ Mozilla, https://www.mozilla.org/en-GB/about/ MySQL, https://dev.mysql.com/ Paper with code, https://paperswithcode.com/ Raspberry Pi OS, https://www.raspberrypi.com/software/ Science Media Centre, https://www.sciencemediacentre.org/ Twitter, https://github.com/twitter zenodo, https://zenodo.org/ Western Digital, https://www.westerndigital.com/ Linux sites Entroware, https://www.entroware.com/store/index.php Fedora, https://getfedora.org/ GNU parallel, https://www.biostars.org/p/63816/ linuxconfig.org, https://linuxconfig.org/ Linux command, https://wangchujiang.com/linux-command/ mythic-beasts, https://www.mythic-beasts.com/ OBI download, https://www.oracle.com/middleware/technologies/business-intelligence-v12213-downloads.html SRCF, https://www.srcf.net/ (site, https://jhz22.user.srcf.net/ ) VirtualBox, https://www.virtualbox.org/ (all downloads, https://download.virtualbox.org/virtualbox/ ) Online documentation/software/utilities Adobe online, https://www.adobe.com/uk/acrobat/online.html Aiseesoft background remover, https://www.aiseesoft.com/online-bg-remover/ apexCharts.js, https://apexcharts.com/ Avalonia Visual Basic6, https://github.com/BAndysc/AvaloniaVisualBasic6 balenaEtcher, https://etcher.balena.io/ Brave, https://brave.com/ brms, https://paul-buerkner.github.io/brms/index.html ( CRAN ) Calibre, https://calibre-ebook.com/ Click, https://click.palletsprojects.com ConvertWizard, https://convertwizard.com/ Dillinger, https://dillinger.io/ Django 5.0 documentation, https://docs.djangoproject.com/en/5.0/ DjVuLibre, http://djvu.sourceforge.net/ djvu.org, http://djvu.org EPUB reader online, https://epub-reader.online/ Equation Editor, https://editor.codecogs.com/ Flapjax, https://www.flapjax-lang.org/ Flask, https://flask.palletsprojects.com/ Foxit, https://www.foxitsoftware.com/ FreeCAD, https://www.freecad.org/ gravis, https://robert-haas.github.io/gravis-docs/index.html HivisionIDPhotos, https://swanhub.co/ZeYiLin/HivisionIDPhotos ( GitHub Icecream PDF Editor, https://icecreamapps.com/PDF-Editor/ iLoveIMG, https://www.iloveimg.com/ iLovePDF, https://www.ilovepdf.com/ IMGonline.com.ua, https://www.imgonline.com.ua/eng/resize-image.php INBO tutorials, https://inbo.github.io/tutorials/ Jinja, https://jinja.palletsprojects.com/ jqplay, https://jqplay.org/ JSEditor, https://jseditor.io/ Krite, https://krita.org/en/ MConverter, https://mconverter.eu/ ( docx2html ) Mermaid, https://mermaid.js.org/ (live editor, https://mermaid.live/ ) MobaXterm, https://mobaxterm.mobatek.net/ nbviewer, https://nbviewer.org/ OddPrints, https://www.oddprints.com/ Oh My Posh, https://ohmyposh.dev/ Online2PDF, https://online2pdf.com/docx2pdf PaddleOCR, 
https://github.com/PaddlePaddle/PaddleOCR PDF24, https://en.pdf24.org/ PDF2DJVU, https://pdf2djvu.com/ PDFgear, https://www.pdfgear.com/ PDF-XCHANGE, https://www.pdf-xchange.com/ (discontinued viewer ) PNG2PDF, https://png2pdf.com/ PhotoScissors, https://photoscissors.com/ QGIS, https://www.qgis.org/ Quarto, https://quarto.org/ RStudio, https://www.rstudio.com/ download ( Mastering Shiny , shinyapps ) Rufus, https://rufus.ie/en/ Scribus, https://sourceforge.net/projects/scribus/ Speedtest, https://www.speedtest.net/ Template-Rcpp, https://github.com/stsds/Template-Rcpp (LinkedIn post ) text-utils, https://www.text-utils.com/ time.is, https://time.is/ Tabby, https://tabby.sh Typora, https://typora.io/ uLisp, http://www.ulisp.com/ WebGL, https://get.webgl.org/ WinToHDD, https://www.easyuefi.com/wintohdd/index.html (also easyUEFI ) WSGI, https://wsgi.readthedocs.io/ Yaak, https://yaak.app/ yozosoft, https://www.yozosoft.com/ zotero-gpt, https://github.com/MuiseDestiny/zotero-gpt","title":"Computing resources"},{"location":"Computing/#computing-resources","text":"","title":"Computing resources"},{"location":"Computing/#online-publications","text":"Cambridge English Dictionary, https://dictionary.cambridge.org/dictionary/ Free literature for your device, https://www.planetebook.com/ Handbook of Statistics, https://www.sciencedirect.com/handbook/handbook-of-statistics Merriam-Webster dictionary, https://www.merriam-webster.com/ ManyBooks, https://manybooks.net/ Springer Link, https://link.springer.com/ The book of statistical proofs, https://statproofbook.github.io/","title":"Online publications"},{"location":"Computing/#reference-management","text":"AuthorArranger, https://authorarranger.nci.nih.gov/ citavi, https://www.citavi.com/ EndNote, https://support.clarivate.com/Endnote/s/?language=en_US ( MyEndNoteWeb ) JabRef, https://www.jabref.org/ Lead2Amazon, https://lead.to/amazon/uk/ Mendeley, https://www.mendeley.com/ Papers, https://www.papersapp.com/ ProQuest, https://about.proquest.com/en/ ( https://refworks.proquest.com/ ) PubMed, https://pubmed.ncbi.nlm.nih.gov/ Qiqqa, https://github.com/jimmejardine/qiqqa-open-source Sciwheel, get started ( Google Docs add-on , MicroSoft Word add-on ) Web of science, https://www.webofscience.com/wos/woscc/basic-search Zotero, https://www.zotero.org/ ( https://zbib.org/ )","title":"Reference management"},{"location":"Computing/#organisations","text":"Apache, https://httpd.apache.org/ APKmirror, https://www.apkmirror.com/ ARIA, https://www.aria.org.uk/ ( https://substack.com/@ariaresearch ) CRA, https://cra.org/ CRAN, https://cran.r-project.org/ CS rankings, https://csrankings.org/ CyberChef, https://gchq.github.io/CyberChef/ Google, https://www.google.co.uk/ Adwords keywords planner, https://ads.google.com/home/tools/keyword-planner/ App Engine, https://cloud.google.com/appengine/ Check My Links DeepMind, https://deepmind.google/ Gemini, https://ai.google.dev/ My groups, https://groups.google.com/my-groups Search console, https://search.google.com/search-console/about Translate, https://translate.google.co.uk/ MicroSoft, https://www.microsoft.com/ Account, https://account.live.com/consent/Manage?uaid=00be9fb584174427b1cffb995d934b24 Bing webmaster tool, https://www.bing.com/webmasters/about Bing image creator, https://www.bing.com/images/create/ dotnet, https://dotnet.microsoft.com/en-us/ Forms GatesNotes, https://www.gatesnotes.com/ Hotmail, https://outlook.live.com/owa/ LINQpad, https://www.linqpad.net/ Support, https://support.microsoft.com/en-us Sway, 
https://sway.office.com/ Windows 11 virtual machines, https://developer.microsoft.com/en-us/windows/downloads/virtual-machines/ Mozilla, https://www.mozilla.org/en-GB/about/ MySQL, https://dev.mysql.com/ Paper with code, https://paperswithcode.com/ Raspberry Pi OS, https://www.raspberrypi.com/software/ Science Media Centre, https://www.sciencemediacentre.org/ Twitter, https://github.com/twitter zenodo, https://zenodo.org/ Western Digital, https://www.westerndigital.com/","title":"Organisations"},{"location":"Computing/#linux-sites","text":"Entroware, https://www.entroware.com/store/index.php Fedora, https://getfedora.org/ GNU parallel, https://www.biostars.org/p/63816/ linuxconfig.org, https://linuxconfig.org/ Linux command, https://wangchujiang.com/linux-command/ mythic-beasts, https://www.mythic-beasts.com/ OBI download, https://www.oracle.com/middleware/technologies/business-intelligence-v12213-downloads.html SRCF, https://www.srcf.net/ (site, https://jhz22.user.srcf.net/ ) VirtualBox, https://www.virtualbox.org/ (all downloads, https://download.virtualbox.org/virtualbox/ )","title":"Linux sites"},{"location":"Computing/#online-documentationsoftwareutilities","text":"Adobe online, https://www.adobe.com/uk/acrobat/online.html Aiseesoft background remover, https://www.aiseesoft.com/online-bg-remover/ apexCharts.js, https://apexcharts.com/ Avalonia Visual Basic6, https://github.com/BAndysc/AvaloniaVisualBasic6 balenaEtcher, https://etcher.balena.io/ Brave, https://brave.com/ brms, https://paul-buerkner.github.io/brms/index.html ( CRAN ) Calibre, https://calibre-ebook.com/ Click, https://click.palletsprojects.com ConvertWizard, https://convertwizard.com/ Dillinger, https://dillinger.io/ Django 5.0 documentation, https://docs.djangoproject.com/en/5.0/ DjVuLibre, http://djvu.sourceforge.net/ djvu.org, http://djvu.org EPUB reader online, https://epub-reader.online/ Equation Editor, https://editor.codecogs.com/ Flapjax, https://www.flapjax-lang.org/ Flask, https://flask.palletsprojects.com/ Foxit, https://www.foxitsoftware.com/ FreeCAD, https://www.freecad.org/ gravis, https://robert-haas.github.io/gravis-docs/index.html HivisionIDPhotos, https://swanhub.co/ZeYiLin/HivisionIDPhotos ( GitHub Icecream PDF Editor, https://icecreamapps.com/PDF-Editor/ iLoveIMG, https://www.iloveimg.com/ iLovePDF, https://www.ilovepdf.com/ IMGonline.com.ua, https://www.imgonline.com.ua/eng/resize-image.php INBO tutorials, https://inbo.github.io/tutorials/ Jinja, https://jinja.palletsprojects.com/ jqplay, https://jqplay.org/ JSEditor, https://jseditor.io/ Krite, https://krita.org/en/ MConverter, https://mconverter.eu/ ( docx2html ) Mermaid, https://mermaid.js.org/ (live editor, https://mermaid.live/ ) MobaXterm, https://mobaxterm.mobatek.net/ nbviewer, https://nbviewer.org/ OddPrints, https://www.oddprints.com/ Oh My Posh, https://ohmyposh.dev/ Online2PDF, https://online2pdf.com/docx2pdf PaddleOCR, https://github.com/PaddlePaddle/PaddleOCR PDF24, https://en.pdf24.org/ PDF2DJVU, https://pdf2djvu.com/ PDFgear, https://www.pdfgear.com/ PDF-XCHANGE, https://www.pdf-xchange.com/ (discontinued viewer ) PNG2PDF, https://png2pdf.com/ PhotoScissors, https://photoscissors.com/ QGIS, https://www.qgis.org/ Quarto, https://quarto.org/ RStudio, https://www.rstudio.com/ download ( Mastering Shiny , shinyapps ) Rufus, https://rufus.ie/en/ Scribus, https://sourceforge.net/projects/scribus/ Speedtest, https://www.speedtest.net/ Template-Rcpp, https://github.com/stsds/Template-Rcpp (LinkedIn post ) text-utils, 
https://www.text-utils.com/ time.is, https://time.is/ Tabby, https://tabby.sh Typora, https://typora.io/ uLisp, http://www.ulisp.com/ WebGL, https://get.webgl.org/ WinToHDD, https://www.easyuefi.com/wintohdd/index.html (also easyUEFI ) WSGI, https://wsgi.readthedocs.io/ Yaak, https://yaak.app/ yozosoft, https://www.yozosoft.com/ zotero-gpt, https://github.com/MuiseDestiny/zotero-gpt","title":"Online documentation/software/utilities"},{"location":"INSTALL/","text":"Installation notes This section lists software which serve as backbone for a variety of projects including those in genetics. Illustration is given for some under Ubutun except R-devel which is with Fedora whose C/C++ version is higher. Environment modules Web: https://modules.readthedocs.io/en/latest/ It is preferable to allow for installation of multiple applications. The following scripts show how this is done under Ubunto. wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz tar xf modules_5.2.0.orig.tar.xz cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list According to https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ . Instances at work is shown here, https://cambridge-ceu.github.io/csd3/systems/ceuadmin.html . Armadillo It is available with sudo apt install libarmadillo-dev boost It is installed with sudo apt install libboost-all-dev To install it manually from source, as for a particular version, https://stackoverflow.com/questions/12578499/how-to-install-boost-on-ubuntu wget https://sourceforge.net/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz tar xvfz boost_1_58_0.tar.gz cd boost_1_58_0 # ./b2 -h gives more options ./bootstrap.sh --prefix=/scratch/jhz22 ./b2 With a successful built, the following directory is suggested to be added to compiler include paths: boost_1_58_0 The following directory should be added to linker library paths: boost_1_58_0/stage/lib and we can test with example #include #include using namespace std; int main(){ boost::array arr = {{1,2,3,4}}; cout << \"hi\" << arr[0]; return 0; } eigen It is installed with sudo apt install libeigen3-dev GMP/MPFR One can start usual from https://gmplib.org/ and https://www.mpfr.org/. sudo apt install libgmp-dev sudo apt install libmpfr-dev then one can install Rmpfr. When installing as non-Admin, make sure issuing 'make check' for both libraries. As MPFR is dependent on GMP, it is necessary to use cd /home/jhz22/Downloads/mpfr-4.0.1 ./configure --prefix=/scratch/jhz22 --with-gmp-build=/home/jhz22/Downloads/gmp-6.1.2 make check for instance. 
GSL sudo apt install libgsl-dev JAGS-4.3.0 These are required at least under Federa 28, sudo dnf install automake sudo dnf install lapack-devel sudo dnf install mercurial It is actually available from Ubuntu archive, i.e., sudo apt install jags sudo apt-get install r-cran-rjags We can also work with sourceforge, wget https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.3.0.tar.gz tar xvfz JAGS-4.3.0.tar.gz cd JAGS-4.3.0 LDFLAGS=\"-L/scratch/jhz22/lib64\" ./configure --prefix=/scratch/jhz22 --with-blas=-lblas --with-lapack=-llapack make make install Under MKL, we have #22-7-2014 MRC-Epid JHZ export MKL_NUM_THREAD=15 export MKL=/home/jhz22/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/home/jhz22 --disable-shared --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKL/include -L$MKL/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make It turns out the easiest to install rjags package is to download it and work manually, e.g., R --no-save < Makevars cd ../.. R CMD INSTALL rjags The rjags package can also be installed as follows, export PKG_CONFIG_PATH=/scratch/jhz22/lib/pkgconfig R CMD INSTALL rjags_4-6.tar.gz --configure-args='CPPFLAGS=\"-fPIC\" LDFLAGS=\"-L/scratch/jhz22/lib -ljags\" --with-jags-prefix=/scratch/jhz22 --with-jags-libdir=/scratch/jhz22/lib --with-jags-includedir=/scratch/jhz22/include' It may still be difficult to install, and we can try manually, tar xfz rjags_4-6.tar.gz cd rjags mv configure configure.bak echo PKG_CPPFLAGS=-fPIC -I/scratch/$USER/include/JAGS > src/Makevars echo PKG_LIBS=-L/scratch/$USER/lib -ljags >> src/Makevars cd - R CMD INSTALL rjags After this, rjags should install as with R2jags. We can also install JAGS-related packages by establishing an Makevars in the src directory, e.g., R --no-save < #ifndef JAGS_MAJOR #define JAGS_MAJOR 4 #endif #define JAGS_MAJOR_FORCED 0 where the Makevars.runjags has the following lines PKG_CPPFLAGS=-I/scratch/jhz22/include PKG_LIBS=-L/scratch/jhz22/lib -ljags OBJECTS= distributions/jags/DFunction.o distributions/jags/DPQFunction.o distributions/jags/PFunction.o distributions/jags/QFunction.o distributions/jags/RScalarDist.o distributions/DPar1.o distributions/DPar2.o distributions/DPar3.o distributions/DPar4.o distributions/DLomax.o distributions/DMouchel.o distributions/DGenPar.o distributions/DHalfCauchy.o runjags.o testrunjags.o To get around these, one can mirror installation of rjags using the fact that runjags simply calls libjags.so though the source seemed for JAGS 3.x.x., export PKG_CONFIG_PATH=/rds-d4/user/jhz22/hpc-work/lib/pkgconfig export LDFLAGS=\"-L/rds-d4/user/jhz22/hpc-work/lib -ljags -lblas -llapack\" R CMD INSTALL runjags_2.0.4-2.tar.gz --configure-args=' --with-jags-prefix=/rds-d4/user/jhz22/hpc-work --with-jags-libdir=/rds-d4/user/jhz22/hpc-work/lib --with-jags-includedir=/rds-d4/user/jhz22/hpc-work/include' but somehow runjags is always points to lib64 for libjags.so, so when libjags.so is in lib instead it is necessary to create symbolic links from lib64. 
BLAS and LAPACK The pre-built version is straightforward for Fedora with sudo dnf install blas-devel sudo dnf install lapack-devel and the counterpart for Ubuntu is sudo apt install libblas-dev sudo apt install liblapack-dev To install from http://www.netlib.org/lapack/, we proceed as follows, wget http://www.netlib.org/lapack/lapack-3.8.0.tar.gz tar xvfz lapack-3.8.0.tar.gz cd lapack-3.8.0 mkdir build cd build ## ccmake . cmake .. make make install It is necessary to invoke ccmake .. to change the default static to dyanmic library as well as target directory. However, in case this is working, one can proceed as follows, cmake -DCMAKE_INSTALL_PREFIX=/rds-d4/user/jhz22/hpc-work -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=ON -DCBLAS=ON -DLAPACKE=ON .. make make install MKL One can consult Intel\u00ae Math Kernel Library Link Line Advisor and Free access to Intel\u00ae Compilers, Performance libraries, Analysis tools and more... . For instance, it is conviently available from Anaconda, conda install -c intel mkl Example use with R under RHEL, # export OMP_NUM_THREADS=6 export MKL_NUM_THREADS=15 export MKLROOT=/genetics/data/software/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/genetics/data/software --enable-R-shlib --enable-threads=posix --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKLROOT/include -L$MKLROOT/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make make install and # https://software.intel.com/en-us/articles/build-r-301-with-intel-c-compiler-and-intel-mkl-on-linux# export ICC_OPT=\"-mkl -xHOST -fp-model strict\" export CC=\"icc $ICC_OPT\" export CXX=\"icpc $ICC_OPT\" export FC=\"ifort -mkl -xHOST\" export F77=\"ifort -mkl -xHOST\" export FPICFLAGS=\" -fPIC\" export AR=xiar export LD=xild export MKL=\"-lmkl_gf_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread\" ./configure --prefix=/home/jhz22/R-devel --enable-R-shlib --with-x=no --with-blas=-lmkl LDFLAGS=-L/home/jhz22/lib CPPFLAGS=-I/home/jhz22/include For Windows, see https://software.intel.com/content/www/us/en/develop/documentation/get-started-with-mkl-for-windows/top.html. The benchmark is available from here, https://github.com/pachamaltese/r-with-intel-mkl/blob/master/00-benchmark-scripts/1-r-benchmark-25.R. cd \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" rename Rblas.dll Rblas.dll.orig rename Rlapack.dll Rlapack.dll.orig cd \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\redist\\intel64_win\\mkl\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rblas.dll\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rlapack.dll\" copy mkl_intel_thread.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" making this known the PATH. NLopt Available from https://nlopt.readthedocs.io/en/latest/ with R counterpart from https://cran.r-project.org/web/packages/nloptr/index.html. GNU Octave It is available with, sudo apt install octave PSPP Under Ubuntu, this can be made available with sudo apt install pspp For Fedora, we have sudo dnf install pspp which will install libpq, gsl, gtksourceview3, spread-sheet-widget as well, see https://apps.fedoraproject.org/packages/pspp. Two simple SPSS command files example.sps and plot.sps can be called with pspp example.sps psppire plot.sps showing CLI and GUI, respectively. Related utilities are pspp-convert . 
It is possible to compile it directly by using gtksourceview 4.0.3 (4.4.0 is more demanding with Python 3.5, meson, Vala, etc.) and use PKG_CONFIG_PATH when appropriate spread-sheet-widget-0.3 fribidi-1.0.8 GTKSOURVIEW_CFLAGS and GTKSOURVIEW_LIBS in the configuration. export PREFIX=/rds/user/$USER/hpc-work export GTKSOURCEVIEW_CFLAGS=-I${PREFIX}/includegtksourceview-4 export GTKSOURCEVIEW_LIBS=\"-L${PREFIX}/lib -lgtksourceview-4\" ./configure --prefix=${PREFIX} make make install note that it is necessary to comment on the statement kludge = gtk_source_view_get_type (); from src/ui/gui/widgets.c and to remove the PREFIX= speficiation in the Perl part of compiling, i.e, cd perl-module /usr/bin/perl Makefile.PL PREFIX=/rds/user/$USER/hpc-work OPTIMIZE=\"-g -O2 -I/rds-d4/user/$USER/hpc-work/include/fribidi -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng15 -I/usr/include/uuid -I/usr/include/libdrm -I/usr/include/pango-1.0 -I/usr/include/harfbuzz \" A more recent description is here, https://cambridge-ceu.github.io/csd3/applications/pspp.html . python A useful resource is code from Pattern Recognition and Machine Learning . It is possible to conduct survival analysis with lifelines , pip install lifelines R Fedora 31 The R-release, including both the compiled and source package, is built as follows, sudo dnf install R sudo dnf install R-devel while the following are necessary to build the development version , sudo dnf install gcc-c++ sudo dnf install gcc-gfortran sudo dnf install pcre-devel sudo dnf install java-1.8.0-openjdk-devel sudo dnf install readline-devel sudo dnf install libcurl-devel sudo dnf install libX11-devel sudo dnf install libXt-devel sudo dnf install bzip2-devel sudo dnf install xz-devel sudo dnf install pandoc sudo dnf install qpdf sudo dnf install texlive-collection-latex sudo dnf install texlive-collection-fontsextra sudo dnf install texinfo-tex sudo dnf install texlive-collection-fontsrecommended sudo dnf install texlive-collection-latexrecommended ./configure This is necessary since gcc 9 is available and required for CRAN package submission , e.g., # R-release to build R CMD build gap # R-devel to check ln -s $HOME/R/R-devel/bin/R $HOME/bin/R-devel R-devel CMD check --as-cran gap_1.1-22.tar.gz For R-devel, these can be used explicitly, export CC=\"/usr/bin/gcc\" export CXX=\"/usr/bin/g++\" export FC=\"/usr/bin/gfortran\" export CFLAGS=\"-g -O2 -Wall -pedantic -mtune=native\" export FFLAGS=\"-g -O2 -mtune=native -Wall -pedantic\" export CXXFLAGS=\"-g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\" export LDFLAGS=\"-L/usr/lib64\" R-devel CMD INSTALL gap_1.2.tar.gz with check on foreign language calls or R-devel CMD INSTALL --configure-args=\" CC=\\\"/usr/bin/gcc\\\" \\ CXX=\\\"/usr/bin/g++\\\" \\ FC=\\\"/usr/bin/gfortran\\\" \\ CFLAGS=\\\"-g -O2 -Wall -pedantic -mtune=native\\\" \\ FFLAGS=\\\"-g -O2 -mtune=native -Wall -pedantic\\\" \\ CXXFLAGS=\\\"-I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\\\" \\ LDFLAGS=\\\"-L/usr/lib64\\\" gap_1.1-26.tar.gz ``` which is more restrictive than the default --as-cran above. 
A simpler setup is also possible with `~/.R/Makevars`, e.g., ```bash CC = gcc CXX = g++ CXX11 = g++ FC = gfortran F77 = gfortran F90 = gfortran CFLAGS = -std=c99 -I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses -Wimplicit-function-declaration CXXFLAGS = -std=c++11 Another example is as follows, module load texlive ./configure --prefix=/rds-d4/user/jhz22/hpc-work \\ --enable-R-shlib \\ CPPFLAGS=-I/rds-d4/user/jhz22/hpc-work/include \\ LDFLAGS=-L/rds-d4/user/jhz22/hpc-work/lib On Fedora 35, we see the following messages from R CMD check gap_1.2.3-6.tar.gz , Error(s) in re-building vignettes: ... --- re-building \u2018gap.Rmd\u2019 using rmarkdown Quitting from lines 273-279 (gap.Rmd) Error: processing vignette 'gap.Rmd' failed with diagnostics: X11 font -adobe-helvetica-%s-%s-*-*-%d-*-*-*-*-*-*-*, face 1 at size 5 could not be loaded --- failed re-building \u2018gap.Rmd\u2019 --- re-building \u2018shinygap.Rmd\u2019 using rmarkdown --- finished re-building \u2018shinygap.Rmd\u2019 --- re-building \u2018jss.Rnw\u2019 using Sweave --- finished re-building \u2018jss.Rnw\u2019 SUMMARY: processing the following file failed: \u2018gap.Rmd\u2019 Error: Vignette re-building failed. Execution halted * checking PDF version of manual ... OK * checking HTML version of manual ... NOTE Skipping checking HTML validation: no command 'tidy' found Skipping checking math rendering: package 'V8' unavailable * checking for non-standard things in the check directory ... OK * checking for detritus in the temp directory ... OK * DONE This is resolved by sudo dnf install v8-devel sudo dnf install xorg-x11-fonts* Rscript -e 'install.packages(c(\"shniy\",\"V8\"),repos=\"https://cran.r-project.org\")' Ubuntu 18.04 The R environment is furnished with sudo apt install r-base-core sudo apt install r-base-dev and R_LIBS is set from .bashrc export R_LIBS=/usr/local/lib/R/site-library/ Note that in fact html.start() in R points to /usr/local/lib/R/library/ instead, see below example in MendelianRandomization . To enable R-devel/package building, these are necessary sudo apt install g++ sudo apt install gfortran sudo apt install texlive sudo apt install texlive-fonts-extra sudo apt install texinfo sudo apt install texlive-fonts-recommended sudo apt install libreadline-dev To set up bzip2, lzma/pcre, curl and then R assuming lapack is already installed, # compile shared library Makefile-libbz2_so and then add -FPIC to CC and recompile # bzip2 # make # make install PREFIX=$SHOME # xz # ./configure --prefix=SHOME/xz-5.2.3 # make -j3 # make install # pcre # ./configure --prefix=$SHOME --enable-utf8 # curl # ./configure --prefix=$SHOME --with-ssl # make && make install ./configure --prefix=/scratch/jhz22 --enable-R-shlib CPPFLAGS=\"-I/scratch/jhz22/include\" LDFLAGS=\"-L/scratch/jhz22/lib\" Windows To build packages on Windows, download Rtools from https://cran.r-project.org/ and install to C:\\Rtools rem 22/8/2019 JHZ set path=C:\\Program Files\\R\\R-3.6.1\\bin;c:\\Rtools\\bin;%PATH%;c:\\Rtools\\mingw_64\\bin;c:\\Rtools\\mingw_32\\bin set lib=c:\\Rtools\\mingw_64\\lib;c:\\Rtools\\mingw_32\\include set include=c:\\Rtools\\mingw_64\\include;c:\\Rtools\\mingw_32\\include We can then run R CMD INSTALL --binary gap , say. It seems the --arch x84 option is very useful for using all available RAM; to make sure use call such as D:\\Program Files\\R\\R-3.5.0\\bin\\x64\\R.exe\" . When this fails, remove large objects in your code and start R with --vanilla option. 
To upgrade R, it is useful to install installr for its updateR() . Package installation CRAN . It is typically done with install.packages() install.packages(\"ggplot2\",INSTALL_opts=\"--library=/usr/local/lib/R/site-library/\") Bioconductor . This is done with biocLite . source(\"https://bioconductor.org/biocLite.R\") biocLite(\"packagename\") From R 3.5 or greater there is BiocManager, if (!requireNamespace(\"BiocManager\", quietly = TRUE)) install.packages(\"BiocManager\") BiocManager::install() See https://bioconductor.org/install/. Lastly, it is possible with devtools::install_bioc() . GitHub . We could set this up via sudo apt install r-cran-devtools . This is then through devtools::install_github() . library(devtools) install_github(\"MRCIEU/TwoSampleMR\",args=\"--library=/usr/local/lib/R/site-library\",force=TRUE) with dedicated location(s); however this is not always the case and an alternative is to use sudo R CMD INSTALL -l $R_LIBS to install into $R_LIBS. It is possible to point to a package, locally or remotely, e.g, install.packages(\"http://cnsgenomics.com/software/gsmr/static/gsmr_1.0.6.tar.gz\",repos=NULL,type=\"source\") whose first argument is a URL. Multiple precision arithmetic . This is modified from notes on SCALLOP-INF analysis. sudo apt install libmpfr-dev R --no-save < install.packages(\"plotly\") --- Please select a CRAN mirror for use in this session --- Error in structure(.External(.C_dotTclObjv, objv), class = \"tclObj\") : [tcl] bad pad value \"2m\": must be positive screen distance. but can be avoided with specificatino of repository. > install.packages(\"plotly\", repos=\"https://cran.r-project.org\") RStudio The distribution has problem loading or creating R script, so it is tempting to install from https://github.com/rstudio/rstudio/. This involves running scripts under directory dependencies/, ./install-dependencies-debian --exclude-qt-sdk and then the following steps, mkdir build cd build cmake .. -DRSTUDIO_TARGET=Desktop -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/lib/rstudio However, there is error with Java and Java 8 is required, see https://tecadmin.net/install-oracle-java-8-ubuntu-via-ppa/. sudo add-apt-repository ppa:webupd8team/java sudo apt-get update sudo apt-get install oracle-java8-installer sudo apt-get install oracle-java8-set-default java -version However, compile error is still persistent except when dropping the option --exclude-qt-sdk but unloadable. It is therefore recommended to get around with RStudio daily builds, https://dailies.rstudio.com/. SageMath sudo apt install sagemath stan cmdstan is now available from https://github.com/stan-dev/cmdstan along with other repositories there. Interfaces are listed at http://mc-stan.org/users/interfaces/index.html. Information on installing RStan is described here, https://github.com/stan-dev/rstan/wiki/Installing-RStan-on-Linux On our HPC system under gcc 4.8.5 there are error message > library(rstan) Loading required package: ggplot2 Registered S3 methods overwritten by 'ggplot2': method from [.quosures rlang c.quosures rlang print.quosures rlang Loading required package: StanHeaders Error: package or namespace load failed for \u2018rstan\u2019 in dyn.load(file, DLLpath = DLLpath, ...): unable to load shared object '/rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so': /usr/lib64/libstdc++.so.6: version `GLIBCXX_3.4.20' not found (required by /rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so) > q() which can be resolved with module load gcc/5.2.0 before invoking R. 
For error message C++14 standard requested but CXX14 is not defined we modify $HOME/.R/Makevars as follows, CXX14 = g++ -std=c++1y -fPIC see https://github.com/stan-dev/rstan/issues/569 but adding -fPIC and as in unixOBD below. unixODBC It is quite standard to install, i.e., wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.7.tar.gz tar xvfz unixODBC-2.3.7.tar.gz cd unixODBC-2.3.7 ./configure --prefix=/scratch/jhz22 make make install There have been many discussions regarding \"C++11 standard requested but CXX11 is not defined\" and this could be fixed with changes to $R_HOME/etc/Makeconf such that CXX11 = g++ -std=c++11 -fPIC then module load gcc/5.2.0 R CMD INSTALL odbc This is necessary for gtx for instance. zlib Try sudo apt-get install libz-dev","title":"INSTALL"},{"location":"INSTALL/#installation-notes","text":"This section lists software which serve as backbone for a variety of projects including those in genetics. Illustration is given for some under Ubutun except R-devel which is with Fedora whose C/C++ version is higher.","title":"Installation notes"},{"location":"INSTALL/#environment-modules","text":"Web: https://modules.readthedocs.io/en/latest/ It is preferable to allow for installation of multiple applications. The following scripts show how this is done under Ubunto. wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz tar xf modules_5.2.0.orig.tar.xz cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list According to https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ . Instances at work is shown here, https://cambridge-ceu.github.io/csd3/systems/ceuadmin.html .","title":"Environment modules"},{"location":"INSTALL/#armadillo","text":"It is available with sudo apt install libarmadillo-dev","title":"Armadillo"},{"location":"INSTALL/#boost","text":"It is installed with sudo apt install libboost-all-dev To install it manually from source, as for a particular version, https://stackoverflow.com/questions/12578499/how-to-install-boost-on-ubuntu wget https://sourceforge.net/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz tar xvfz boost_1_58_0.tar.gz cd boost_1_58_0 # ./b2 -h gives more options ./bootstrap.sh --prefix=/scratch/jhz22 ./b2 With a successful built, the following directory is suggested to be added to compiler include paths: boost_1_58_0 The following directory should be added to linker library paths: boost_1_58_0/stage/lib and we can test with example #include #include using namespace std; int main(){ boost::array arr = {{1,2,3,4}}; cout << \"hi\" << arr[0]; return 0; }","title":"boost"},{"location":"INSTALL/#eigen","text":"It is installed with sudo apt install libeigen3-dev","title":"eigen"},{"location":"INSTALL/#gmpmpfr","text":"One can start usual from https://gmplib.org/ and https://www.mpfr.org/. sudo apt install libgmp-dev sudo apt install libmpfr-dev then one can install Rmpfr. When installing as non-Admin, make sure issuing 'make check' for both libraries. 
As MPFR is dependent on GMP, it is necessary to use cd /home/jhz22/Downloads/mpfr-4.0.1 ./configure --prefix=/scratch/jhz22 --with-gmp-build=/home/jhz22/Downloads/gmp-6.1.2 make check for instance.","title":"GMP/MPFR"},{"location":"INSTALL/#gsl","text":"sudo apt install libgsl-dev","title":"GSL"},{"location":"INSTALL/#jags-430","text":"These are required at least under Federa 28, sudo dnf install automake sudo dnf install lapack-devel sudo dnf install mercurial It is actually available from Ubuntu archive, i.e., sudo apt install jags sudo apt-get install r-cran-rjags We can also work with sourceforge, wget https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.3.0.tar.gz tar xvfz JAGS-4.3.0.tar.gz cd JAGS-4.3.0 LDFLAGS=\"-L/scratch/jhz22/lib64\" ./configure --prefix=/scratch/jhz22 --with-blas=-lblas --with-lapack=-llapack make make install Under MKL, we have #22-7-2014 MRC-Epid JHZ export MKL_NUM_THREAD=15 export MKL=/home/jhz22/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/home/jhz22 --disable-shared --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKL/include -L$MKL/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make It turns out the easiest to install rjags package is to download it and work manually, e.g., R --no-save < Makevars cd ../.. R CMD INSTALL rjags The rjags package can also be installed as follows, export PKG_CONFIG_PATH=/scratch/jhz22/lib/pkgconfig R CMD INSTALL rjags_4-6.tar.gz --configure-args='CPPFLAGS=\"-fPIC\" LDFLAGS=\"-L/scratch/jhz22/lib -ljags\" --with-jags-prefix=/scratch/jhz22 --with-jags-libdir=/scratch/jhz22/lib --with-jags-includedir=/scratch/jhz22/include' It may still be difficult to install, and we can try manually, tar xfz rjags_4-6.tar.gz cd rjags mv configure configure.bak echo PKG_CPPFLAGS=-fPIC -I/scratch/$USER/include/JAGS > src/Makevars echo PKG_LIBS=-L/scratch/$USER/lib -ljags >> src/Makevars cd - R CMD INSTALL rjags After this, rjags should install as with R2jags. 
We can also install JAGS-related packages by establishing an Makevars in the src directory, e.g., R --no-save < #ifndef JAGS_MAJOR #define JAGS_MAJOR 4 #endif #define JAGS_MAJOR_FORCED 0 where the Makevars.runjags has the following lines PKG_CPPFLAGS=-I/scratch/jhz22/include PKG_LIBS=-L/scratch/jhz22/lib -ljags OBJECTS= distributions/jags/DFunction.o distributions/jags/DPQFunction.o distributions/jags/PFunction.o distributions/jags/QFunction.o distributions/jags/RScalarDist.o distributions/DPar1.o distributions/DPar2.o distributions/DPar3.o distributions/DPar4.o distributions/DLomax.o distributions/DMouchel.o distributions/DGenPar.o distributions/DHalfCauchy.o runjags.o testrunjags.o To get around these, one can mirror installation of rjags using the fact that runjags simply calls libjags.so though the source seemed for JAGS 3.x.x., export PKG_CONFIG_PATH=/rds-d4/user/jhz22/hpc-work/lib/pkgconfig export LDFLAGS=\"-L/rds-d4/user/jhz22/hpc-work/lib -ljags -lblas -llapack\" R CMD INSTALL runjags_2.0.4-2.tar.gz --configure-args=' --with-jags-prefix=/rds-d4/user/jhz22/hpc-work --with-jags-libdir=/rds-d4/user/jhz22/hpc-work/lib --with-jags-includedir=/rds-d4/user/jhz22/hpc-work/include' but somehow runjags is always points to lib64 for libjags.so, so when libjags.so is in lib instead it is necessary to create symbolic links from lib64.","title":"JAGS-4.3.0"},{"location":"INSTALL/#blas-and-lapack","text":"The pre-built version is straightforward for Fedora with sudo dnf install blas-devel sudo dnf install lapack-devel and the counterpart for Ubuntu is sudo apt install libblas-dev sudo apt install liblapack-dev To install from http://www.netlib.org/lapack/, we proceed as follows, wget http://www.netlib.org/lapack/lapack-3.8.0.tar.gz tar xvfz lapack-3.8.0.tar.gz cd lapack-3.8.0 mkdir build cd build ## ccmake . cmake .. make make install It is necessary to invoke ccmake .. to change the default static to dyanmic library as well as target directory. However, in case this is working, one can proceed as follows, cmake -DCMAKE_INSTALL_PREFIX=/rds-d4/user/jhz22/hpc-work -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=ON -DCBLAS=ON -DLAPACKE=ON .. make make install","title":"BLAS and LAPACK"},{"location":"INSTALL/#mkl","text":"One can consult Intel\u00ae Math Kernel Library Link Line Advisor and Free access to Intel\u00ae Compilers, Performance libraries, Analysis tools and more... . 
For instance, it is conviently available from Anaconda, conda install -c intel mkl Example use with R under RHEL, # export OMP_NUM_THREADS=6 export MKL_NUM_THREADS=15 export MKLROOT=/genetics/data/software/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/genetics/data/software --enable-R-shlib --enable-threads=posix --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKLROOT/include -L$MKLROOT/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make make install and # https://software.intel.com/en-us/articles/build-r-301-with-intel-c-compiler-and-intel-mkl-on-linux# export ICC_OPT=\"-mkl -xHOST -fp-model strict\" export CC=\"icc $ICC_OPT\" export CXX=\"icpc $ICC_OPT\" export FC=\"ifort -mkl -xHOST\" export F77=\"ifort -mkl -xHOST\" export FPICFLAGS=\" -fPIC\" export AR=xiar export LD=xild export MKL=\"-lmkl_gf_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread\" ./configure --prefix=/home/jhz22/R-devel --enable-R-shlib --with-x=no --with-blas=-lmkl LDFLAGS=-L/home/jhz22/lib CPPFLAGS=-I/home/jhz22/include For Windows, see https://software.intel.com/content/www/us/en/develop/documentation/get-started-with-mkl-for-windows/top.html. The benchmark is available from here, https://github.com/pachamaltese/r-with-intel-mkl/blob/master/00-benchmark-scripts/1-r-benchmark-25.R. cd \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" rename Rblas.dll Rblas.dll.orig rename Rlapack.dll Rlapack.dll.orig cd \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\redist\\intel64_win\\mkl\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rblas.dll\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rlapack.dll\" copy mkl_intel_thread.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" making this known the PATH.","title":"MKL"},{"location":"INSTALL/#nlopt","text":"Available from https://nlopt.readthedocs.io/en/latest/ with R counterpart from https://cran.r-project.org/web/packages/nloptr/index.html.","title":"NLopt"},{"location":"INSTALL/#gnu-octave","text":"It is available with, sudo apt install octave","title":"GNU Octave"},{"location":"INSTALL/#pspp","text":"Under Ubuntu, this can be made available with sudo apt install pspp For Fedora, we have sudo dnf install pspp which will install libpq, gsl, gtksourceview3, spread-sheet-widget as well, see https://apps.fedoraproject.org/packages/pspp. Two simple SPSS command files example.sps and plot.sps can be called with pspp example.sps psppire plot.sps showing CLI and GUI, respectively. Related utilities are pspp-convert . It is possible to compile it directly by using gtksourceview 4.0.3 (4.4.0 is more demanding with Python 3.5, meson, Vala, etc.) and use PKG_CONFIG_PATH when appropriate spread-sheet-widget-0.3 fribidi-1.0.8 GTKSOURVIEW_CFLAGS and GTKSOURVIEW_LIBS in the configuration. 
export PREFIX=/rds/user/$USER/hpc-work export GTKSOURCEVIEW_CFLAGS=-I${PREFIX}/includegtksourceview-4 export GTKSOURCEVIEW_LIBS=\"-L${PREFIX}/lib -lgtksourceview-4\" ./configure --prefix=${PREFIX} make make install note that it is necessary to comment on the statement kludge = gtk_source_view_get_type (); from src/ui/gui/widgets.c and to remove the PREFIX= speficiation in the Perl part of compiling, i.e, cd perl-module /usr/bin/perl Makefile.PL PREFIX=/rds/user/$USER/hpc-work OPTIMIZE=\"-g -O2 -I/rds-d4/user/$USER/hpc-work/include/fribidi -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng15 -I/usr/include/uuid -I/usr/include/libdrm -I/usr/include/pango-1.0 -I/usr/include/harfbuzz \" A more recent description is here, https://cambridge-ceu.github.io/csd3/applications/pspp.html .","title":"PSPP"},{"location":"INSTALL/#python","text":"A useful resource is code from Pattern Recognition and Machine Learning . It is possible to conduct survival analysis with lifelines , pip install lifelines","title":"python"},{"location":"INSTALL/#r","text":"","title":"R"},{"location":"INSTALL/#fedora-31","text":"The R-release, including both the compiled and source package, is built as follows, sudo dnf install R sudo dnf install R-devel while the following are necessary to build the development version , sudo dnf install gcc-c++ sudo dnf install gcc-gfortran sudo dnf install pcre-devel sudo dnf install java-1.8.0-openjdk-devel sudo dnf install readline-devel sudo dnf install libcurl-devel sudo dnf install libX11-devel sudo dnf install libXt-devel sudo dnf install bzip2-devel sudo dnf install xz-devel sudo dnf install pandoc sudo dnf install qpdf sudo dnf install texlive-collection-latex sudo dnf install texlive-collection-fontsextra sudo dnf install texinfo-tex sudo dnf install texlive-collection-fontsrecommended sudo dnf install texlive-collection-latexrecommended ./configure This is necessary since gcc 9 is available and required for CRAN package submission , e.g., # R-release to build R CMD build gap # R-devel to check ln -s $HOME/R/R-devel/bin/R $HOME/bin/R-devel R-devel CMD check --as-cran gap_1.1-22.tar.gz For R-devel, these can be used explicitly, export CC=\"/usr/bin/gcc\" export CXX=\"/usr/bin/g++\" export FC=\"/usr/bin/gfortran\" export CFLAGS=\"-g -O2 -Wall -pedantic -mtune=native\" export FFLAGS=\"-g -O2 -mtune=native -Wall -pedantic\" export CXXFLAGS=\"-g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\" export LDFLAGS=\"-L/usr/lib64\" R-devel CMD INSTALL gap_1.2.tar.gz with check on foreign language calls or R-devel CMD INSTALL --configure-args=\" CC=\\\"/usr/bin/gcc\\\" \\ CXX=\\\"/usr/bin/g++\\\" \\ FC=\\\"/usr/bin/gfortran\\\" \\ CFLAGS=\\\"-g -O2 -Wall -pedantic -mtune=native\\\" \\ FFLAGS=\\\"-g -O2 -mtune=native -Wall -pedantic\\\" \\ CXXFLAGS=\\\"-I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\\\" \\ LDFLAGS=\\\"-L/usr/lib64\\\" gap_1.1-26.tar.gz ``` which is more restrictive than the default --as-cran above. 
A simpler setup is also possible with `~/.R/Makevars`, e.g., ```bash CC = gcc CXX = g++ CXX11 = g++ FC = gfortran F77 = gfortran F90 = gfortran CFLAGS = -std=c99 -I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses -Wimplicit-function-declaration CXXFLAGS = -std=c++11 Another example is as follows, module load texlive ./configure --prefix=/rds-d4/user/jhz22/hpc-work \\ --enable-R-shlib \\ CPPFLAGS=-I/rds-d4/user/jhz22/hpc-work/include \\ LDFLAGS=-L/rds-d4/user/jhz22/hpc-work/lib On Fedora 35, we see the following messages from R CMD check gap_1.2.3-6.tar.gz , Error(s) in re-building vignettes: ... --- re-building \u2018gap.Rmd\u2019 using rmarkdown Quitting from lines 273-279 (gap.Rmd) Error: processing vignette 'gap.Rmd' failed with diagnostics: X11 font -adobe-helvetica-%s-%s-*-*-%d-*-*-*-*-*-*-*, face 1 at size 5 could not be loaded --- failed re-building \u2018gap.Rmd\u2019 --- re-building \u2018shinygap.Rmd\u2019 using rmarkdown --- finished re-building \u2018shinygap.Rmd\u2019 --- re-building \u2018jss.Rnw\u2019 using Sweave --- finished re-building \u2018jss.Rnw\u2019 SUMMARY: processing the following file failed: \u2018gap.Rmd\u2019 Error: Vignette re-building failed. Execution halted * checking PDF version of manual ... OK * checking HTML version of manual ... NOTE Skipping checking HTML validation: no command 'tidy' found Skipping checking math rendering: package 'V8' unavailable * checking for non-standard things in the check directory ... OK * checking for detritus in the temp directory ... OK * DONE This is resolved by sudo dnf install v8-devel sudo dnf install xorg-x11-fonts* Rscript -e 'install.packages(c(\"shniy\",\"V8\"),repos=\"https://cran.r-project.org\")'","title":"Fedora 31"},{"location":"INSTALL/#ubuntu-1804","text":"The R environment is furnished with sudo apt install r-base-core sudo apt install r-base-dev and R_LIBS is set from .bashrc export R_LIBS=/usr/local/lib/R/site-library/ Note that in fact html.start() in R points to /usr/local/lib/R/library/ instead, see below example in MendelianRandomization . To enable R-devel/package building, these are necessary sudo apt install g++ sudo apt install gfortran sudo apt install texlive sudo apt install texlive-fonts-extra sudo apt install texinfo sudo apt install texlive-fonts-recommended sudo apt install libreadline-dev To set up bzip2, lzma/pcre, curl and then R assuming lapack is already installed, # compile shared library Makefile-libbz2_so and then add -FPIC to CC and recompile # bzip2 # make # make install PREFIX=$SHOME # xz # ./configure --prefix=SHOME/xz-5.2.3 # make -j3 # make install # pcre # ./configure --prefix=$SHOME --enable-utf8 # curl # ./configure --prefix=$SHOME --with-ssl # make && make install ./configure --prefix=/scratch/jhz22 --enable-R-shlib CPPFLAGS=\"-I/scratch/jhz22/include\" LDFLAGS=\"-L/scratch/jhz22/lib\"","title":"Ubuntu 18.04"},{"location":"INSTALL/#windows","text":"To build packages on Windows, download Rtools from https://cran.r-project.org/ and install to C:\\Rtools rem 22/8/2019 JHZ set path=C:\\Program Files\\R\\R-3.6.1\\bin;c:\\Rtools\\bin;%PATH%;c:\\Rtools\\mingw_64\\bin;c:\\Rtools\\mingw_32\\bin set lib=c:\\Rtools\\mingw_64\\lib;c:\\Rtools\\mingw_32\\include set include=c:\\Rtools\\mingw_64\\include;c:\\Rtools\\mingw_32\\include We can then run R CMD INSTALL --binary gap , say. 
It seems the --arch x84 option is very useful for using all available RAM; to make sure use call such as D:\\Program Files\\R\\R-3.5.0\\bin\\x64\\R.exe\" . When this fails, remove large objects in your code and start R with --vanilla option. To upgrade R, it is useful to install installr for its updateR() .","title":"Windows"},{"location":"INSTALL/#package-installation","text":"CRAN . It is typically done with install.packages() install.packages(\"ggplot2\",INSTALL_opts=\"--library=/usr/local/lib/R/site-library/\") Bioconductor . This is done with biocLite . source(\"https://bioconductor.org/biocLite.R\") biocLite(\"packagename\") From R 3.5 or greater there is BiocManager, if (!requireNamespace(\"BiocManager\", quietly = TRUE)) install.packages(\"BiocManager\") BiocManager::install() See https://bioconductor.org/install/. Lastly, it is possible with devtools::install_bioc() . GitHub . We could set this up via sudo apt install r-cran-devtools . This is then through devtools::install_github() . library(devtools) install_github(\"MRCIEU/TwoSampleMR\",args=\"--library=/usr/local/lib/R/site-library\",force=TRUE) with dedicated location(s); however this is not always the case and an alternative is to use sudo R CMD INSTALL -l $R_LIBS to install into $R_LIBS. It is possible to point to a package, locally or remotely, e.g, install.packages(\"http://cnsgenomics.com/software/gsmr/static/gsmr_1.0.6.tar.gz\",repos=NULL,type=\"source\") whose first argument is a URL. Multiple precision arithmetic . This is modified from notes on SCALLOP-INF analysis. sudo apt install libmpfr-dev R --no-save < install.packages(\"plotly\") --- Please select a CRAN mirror for use in this session --- Error in structure(.External(.C_dotTclObjv, objv), class = \"tclObj\") : [tcl] bad pad value \"2m\": must be positive screen distance. but can be avoided with specificatino of repository. > install.packages(\"plotly\", repos=\"https://cran.r-project.org\")","title":"Package installation"},{"location":"INSTALL/#rstudio","text":"The distribution has problem loading or creating R script, so it is tempting to install from https://github.com/rstudio/rstudio/. This involves running scripts under directory dependencies/, ./install-dependencies-debian --exclude-qt-sdk and then the following steps, mkdir build cd build cmake .. -DRSTUDIO_TARGET=Desktop -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/lib/rstudio However, there is error with Java and Java 8 is required, see https://tecadmin.net/install-oracle-java-8-ubuntu-via-ppa/. sudo add-apt-repository ppa:webupd8team/java sudo apt-get update sudo apt-get install oracle-java8-installer sudo apt-get install oracle-java8-set-default java -version However, compile error is still persistent except when dropping the option --exclude-qt-sdk but unloadable. It is therefore recommended to get around with RStudio daily builds, https://dailies.rstudio.com/.","title":"RStudio"},{"location":"INSTALL/#sagemath","text":"sudo apt install sagemath","title":"SageMath"},{"location":"INSTALL/#stan","text":"cmdstan is now available from https://github.com/stan-dev/cmdstan along with other repositories there. Interfaces are listed at http://mc-stan.org/users/interfaces/index.html. 
Information on installing RStan is described here, https://github.com/stan-dev/rstan/wiki/Installing-RStan-on-Linux On our HPC system under gcc 4.8.5 there are error message > library(rstan) Loading required package: ggplot2 Registered S3 methods overwritten by 'ggplot2': method from [.quosures rlang c.quosures rlang print.quosures rlang Loading required package: StanHeaders Error: package or namespace load failed for \u2018rstan\u2019 in dyn.load(file, DLLpath = DLLpath, ...): unable to load shared object '/rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so': /usr/lib64/libstdc++.so.6: version `GLIBCXX_3.4.20' not found (required by /rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so) > q() which can be resolved with module load gcc/5.2.0 before invoking R. For error message C++14 standard requested but CXX14 is not defined we modify $HOME/.R/Makevars as follows, CXX14 = g++ -std=c++1y -fPIC see https://github.com/stan-dev/rstan/issues/569 but adding -fPIC and as in unixOBD below.","title":"stan"},{"location":"INSTALL/#unixodbc","text":"It is quite standard to install, i.e., wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.7.tar.gz tar xvfz unixODBC-2.3.7.tar.gz cd unixODBC-2.3.7 ./configure --prefix=/scratch/jhz22 make make install There have been many discussions regarding \"C++11 standard requested but CXX11 is not defined\" and this could be fixed with changes to $R_HOME/etc/Makeconf such that CXX11 = g++ -std=c++11 -fPIC then module load gcc/5.2.0 R CMD INSTALL odbc This is necessary for gtx for instance.","title":"unixODBC"},{"location":"INSTALL/#zlib","text":"Try sudo apt-get install libz-dev","title":"zlib"},{"location":"LANGUAGES/","text":"Language notes This page collects information on Visual Studio Code, C, C++, Fortran, Java, Perl, python and R. Ada Web: https://www.adaic.org/ From hello.adb , with Ada.Text_IO; procedure Hello is begin Ada.Text_IO.Put_Line (\"Hello, World!\"); end Hello; We run gnatmake hello hello BASIC It is still possible to reflect the old language from here, https://sourceforge.net/projects/devos-studio/ . C There have been multiple instances for suggesting migration from tempnam to mkstemp ( /usr/include/stdlib.h under Bash but not Windows) and mktemp -- the following code illustrates its use under both Bash and Windows, Nevertheless these are not standard routines, one still needs to add char *mktemp(char *) for instance. #include #include /* for open flags */ #include /* for PATH_MAX */ int main(void) { static char template[] = \"/tmp/myfileXXXXXX\"; char fname[PATH_MAX]; static char mesg[] = \"Here's lookin' at you, kid!\\n\"; /* beats \"hello, world\" */ int fd; strcpy(fname, template); mktemp(fname); /* RACE CONDITION WINDOW OPENS */ printf(\"Filename is %s\\n\", fname); /* RACE CONDITION WINDOW LASTS TO HERE */ fd = open(fname, O_CREAT|O_RDWR|O_TRUNC, 0600); write(fd, mesg, strlen(mesg)); close(fd); /* unlink(fname); */ return 0; } A script for testing UTF-8 support by PCRE, #include #include #include int main() { int supports_utf8; if (pcre_config (PCRE_CONFIG_UTF8, &supports_utf8)) { fprintf(stderr, \"pcre_config() failed\\n\"); exit(EXIT_FAILURE); } printf(\"UTF-8 is supported: %s\\n\", supports_utf8 ? 
\"yes\" : \"no\"); exit(EXIT_SUCCESS); } // gcc $(pkg-config --cflags --libs libpcre) pcreutf.c // ./a.out // pcretest -C The following is Timsort implementation, #include #define MIN_RUN 32 // \u63d2\u5165\u6392\u5e8f\u7b97\u6cd5 void insertionSort(int arr[], int left, int right) { for (int i = left + 1; i <= right; i++) { int key = arr[i]; int j = i - 1; while (j >= left && arr[j] > key) { arr[j + 1] = arr[j]; j--; } arr[j + 1] = key; } } // \u5f52\u5e76\u51fd\u6570 void merge(int arr[], int left, int mid, int right) { int len1 = mid - left + 1; int len2 = right - mid; int L[len1], R[len2]; for (int i = 0; i < len1; i++) L[i] = arr[left + i]; for (int j = 0; j < len2; j++) R[j] = arr[mid + 1 + j]; int i = 0, j = 0, k = left; while (i < len1 && j < len2) { if (L[i] <= R[j]) arr[k++] = L[i++]; else arr[k++] = R[j++]; } while (i < len1) arr[k++] = L[i++]; while (j < len2) arr[k++] = R[j++]; } // Timsort \u7b97\u6cd5 void timSort(int arr[], int n) { for (int i = 0; i < n; i += MIN_RUN) insertionSort(arr, i, (i + MIN_RUN - 1) < n ? (i + MIN_RUN - 1) : (n - 1)); for (int size = MIN_RUN; size < n; size *= 2) { for (int left = 0; left < n; left += 2 * size) { int mid = left + size - 1; int right = (left + 2 * size - 1) < (n - 1) ? (left + 2 * size - 1) : (n - 1); merge(arr, left, mid, right); } } } int main() { int arr[] = {12, 11, 13, 5, 6, 7}; int n = sizeof(arr) / sizeof(arr[0]); printf(\"Original array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); timSort(arr, n); printf(\"\\nSorted array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); return 0; } y gcc timsort.c -o timsort and timsort to get Original array: 12 11 13 5 6 7 Sorted array: 5 6 7 11 12 13 14:40 C++ The use of Google Test is noted here, Web: https://github.com/google/googletest . wget -qO- https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz | \\ tar xvfz - cd googletest-release-1.11.0 mkdir build && cd build build .. make # amending set(CMAKE_INSTALL_PREFIX \"/rds/user/jhz22/hpc-work\") in `cmake_install.cmake` make install Now it is possible to compile R/glmnet 4.1-3, i.e., find_package(GTest 1.11 CONFIG REQUIRED) of src/glmnetpp/CMakeLists.txt . Fortran Information on modernising Fortran could be very useful in foreign language calls (e.g., R), http://fortranwiki.org/fortran/show/Modernizing+Old+Fortran. Debugging Fortran code gdb https://undo.io/resources/debugging-fortran-code-gdb/ valgrind program segfault1 implicit none real, dimension(10) :: a integer :: i a = 0. do i = 1, 12 a(i) = i print*,a(i) end do end program segfault1 ! gfortran -g -Wall -Wextra -Wimplicit-interface -fPIC -fmax-errors=1 -fcheck=all -fbacktrace segfault1.f90 -o segfault1 ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 ! MacOS --dsymutil=yes: ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 Java The IDE of choice is NetBeans (e.g., DEPICT and JAM); however 8.1 from apt install under Ubuntu 18.04 crashes so it is suggested to download directly from https://netbeans.org/downloads/. To enable JDK it is helpful to specify --javahome option. sudo ./netbeans-8.2-linux.sh --javahome /usr/lib/jvm/java-8-oracale or start with netbeans --javahome /usr/lib/jvm/java-8-oracle (more convenient to set alias netbeans='netbeans --javahome /usr/lib/jvm/java-8-oracle' at .bashrc ). NetBeans 9.0 is currently available from https://netbeans.apache.org/download/nb90/; the .zip file can be downloaded and unpacked for use. 
For software such as cutadapt cython is required, sudo apt install cython JavaScript A current JavaScript/TypeScript interpreter is deno, https://anaconda.org/conda-forge/deno/files . wget https://anaconda.org/conda-forge/deno/1.40.2/download/linux-64/deno-1.40.2-hfc7925d_0.conda -O deno.conda unzip deno.conda tar --use-compress-program=unzstd -xvf pkg-deno-1.40.2-hfc7925d_0.tar.zst deno --version giving deno 1.40.2 (release, x86_64-unknown-linux-gnu) v8 12.1.285.6 typescript 5.3.3 The mermaid diagram is illustrated with mermaid.html using code available from here, https://cdnjs.cloudflare.com/ajax/libs/mermaid/8.0.0/mermaid.min.js . The call can be embedded in markdown document, The hello world example with plotly.js is https://plot.ly/javascript/getting-started/#hello-world-example and the 3D diagram is with 3d-scatter.html based on https://plot.ly/javascript/3d-scatter-plots/ . The base64 encode/decode is with https://www.base64encode.org/ & https://www.base64decode.org/ . Perl sudo perl -MCPAN -e shell install DBI for instance, as used in VEP . Another notable example is circos, http://circos.ca and its Google group , wget -qO- http://www.circos.ca/distribution/circos-current.tgz | \\ tar xvfz - cd circos-0.69-9 bin/circos --modules wget -qO- http://circos.ca/distribution/circos-tutorials-current.tgz | \\ tar xvfz - wget -qO- http://www.circos.ca/distribution/circos-tools-current.tgz | \\ tar xvfz - The following required modules can be installed Config::General (v2.50 or later) Font::TTF GD List::MoreUtils Math::Bezier Math::Round Math::VecStat Params::Validate Readonly Regexp::Common Set::IntSpan (v1.16 or later) Text::Format and we can enter the example/ directory to run its script. The CircosAPI module requires namespace::autoclean , Moose , JSON::PP and String::Util . Python To disable upgrade of pip, add [global] disable-pip-version-check = True option to $HOME/.config/pip/pip.conf To install a particular version of package, e.g., sudo -H pip install pandas==0.20.1 which is required by DEPICT's munge_sumstats.py . Other pip options include uninstall . The python programs in agotron_detector requires MySQL and can be installed as follows, sudo apt-get install python-dev libmysqlclient-dev sudo pip install MySQL-python It is necessary to use --user option without super-user privilege. PyStan is available with pip install pystan which uses matplotlib, https://github.com/matplotlib and Tkinter, established with sudo apt install python-tk or sudo apt install python3-tk . import pystan schools_code = \"\"\" data { int J; // number of schools real y[J]; // estimated treatment effects real sigma[J]; // s.e. 
of effect estimates } parameters { real mu; real tau; real eta[J]; } transformed parameters { real theta[J]; for (j in 1:J) theta[j] = mu + tau * eta[j]; } model { eta ~ normal(0, 1); y ~ normal(theta, sigma); } \"\"\" schools_dat = {'J': 8, 'y': [28, 8, -3, 7, -1, 1, 18, 12], 'sigma': [15, 10, 16, 11, 9, 11, 10, 18]} sm = pystan.StanModel(model_code=schools_code) fit = sm.sampling(data=schools_dat, iter=1000, chains=4) import matplotlib.pyplot as plt def plotGraph(): fig = fit.plot() # plt.show() # use the save button or the following command, # f.savefig(\"foo.pdf\", bbox_inches='tight') return fig from matplotlib.backends.backend_pdf import PdfPages pp = PdfPages('foo.pdf') f = plotGraph() pp.savefig(f) pp.close() To install jupyter-book, module load python/2.7.10 python -m pip install jupyter-book --user and we can check for $HOME/.local/lib/python2.7/site-packages and start from /home/jhz22/.local/bin. We can install notebook similarly. python -m pip install notebook --user Owing to recent changes, it is more appropriate to use python3 module load python/3.5 export PATH=$PATH:$HOME/.local/bin export PYTHONPATH=/usr/local/Cluster-Apps/python/3.5.1/lib/python3.5/site-packages:$HOME/.local/lib/python3.5/site-packages python3 -m pip install jupyter-book --user To convert from parquet to csv is done as follows, import pandas as pd import pyspark import pyarrow import sys import os fn = sys.argv[1] print(fn) df = pd.read_parquet(fn) outfn = \"\".join(\"GTEx_Analysis_v8_EUR_eQTL_all_associations_csv/\" + os.path.splitext(os.path.basename(fn))[0] + \".csv\") print(outfn) df.to_csv(outfn) R Information on R and RStudio can be seen from installation section of this, https://jinghuazhao.github.io/Computational-Statistics/INSTALL/ . The use of multi-byte string needs specific handling, e.g., # on Bash iconv myfile -f UTF-8 -t ISO-8859-1 -c and # in R Sys.setlocale(\"LC_ALL\", \"C\") See https://stackoverflow.com/questions/4993837/r-invalid-multibyte-string plotly It requires a number of software, sudo dnf install udunits2-devel sudo dnf install cairo-devel sudo dnf install gdal gdal-devel sudo dnf install proj-devel proj-static sudo dnf install geos geos-devel to be followed by install.packages(\"plotly\",depend=TRUE,repos=\"https://cran.r-project.org\") Calls from R Basic examples using OpenMP with R, for C, C++, F77, and Fortran 2003 using Romp, https://github.com/wrathematics/Romp RFI: R to Modern Fortran Interface, https://github.com/t-kalinowski/RFI Stanford Utility Tools for R packages using Fortran, https://bnaras.github.io/SUtools/articles/SUtools.html Package examples for Fortran, * https://cran.r-project.org/web/packages/Delaporte/index.html. * https://cran.r-project.org/web/packages/spam/index.html * https://cran.r-project.org/web/packages/spam64/index.html Documentation * https://www.avrahamadler.com/2018/12/09/the-need-for-speed-part-1-building-an-r-package-with-fortran/ * https://www.avrahamadler.com/2018/12/23/the-need-for-speed-part-2-c-vs-fortran-vs-c/ * https://www.sciencedirect.com/science/article/pii/S2352711018300785?via%3Dihub R packages See https://r-pkgs.org/index.html . shinyapps Web: https://www.shinyapps.io/ , Shiny examples The hello world version is as follows, library(shiny) ui <- fluidPage( \"Hello, world!\" ) server <- function(input, output, session) { } shinyApp(ui, server) Suppose our a directory (called shinyapps here) contains files ui.R and server.R (or combined in app.R ). Go the web site, and register an account with email address. 
Login from https://www.shinyapps.io/admin/#/login and the following information is available: Step 1 \u2013 Install rsconnect The rsconnect package can be installed directly from CRAN. To make sure you have the latest version run following code in your R console: install.packages('rsconnect') Step 2 \u2013 Authorize Account The rsconnect package must be authorized to your account using a token and secret. To do this, click the copy button below and we'll copy the whole command you need to your clipboard. Just paste it into your console to authorize your account. Once you've entered the command successfully in R, that computer is now authorized to deploy applications to your shinyapps.io account. rsconnect::setAccountInfo(name='your-account', token='your token', secret='your secret') In the future, you can manage your tokens from the Tokens page the settings menu. Step 3 \u2013 Deploy Once the rsconnect package has been configured, you're ready to deploy your first application. If you haven't written any applications yet, you can also checkout the Getting Started Guide for instructions on how to deploy our demo application. Run the following code in your R console. library(rsconnect) rsconnect::deployApp('path/to/your/app') The shiny page is then up as https://your-account.shinyapps.io/shinyapps/ . A more sophisticated Dashboard using the Gapminer dataset is copied here from R-bloggers. library(shiny) library(dplyr) library(purrr) library(gapminder) library(highcharter) ui <- fluidPage( tags$head( tags$link(rel = \"stylesheet\", type = \"text/css\", href = \"styles.css\") ), sidebarLayout( sidebarPanel( titlePanel(\"R Shiny Highcharts\"), selectInput( inputId = \"inContinent\", label = \"Continent:\", choices = unique(gapminder$continent), selected = \"Europe\" ), selectInput( inputId = \"inYearMin\", label = \"Start year:\", choices = unique(gapminder$year)[1:length(unique(gapminder$year)) - 1], selected = min(gapminder$year) ), selectInput( inputId = \"inYearMax\", label = \"End year:\", choices = unique(gapminder$year)[2:length(unique(gapminder$year))], selected = max(gapminder$year) ), width = 3 ), mainPanel( tags$h3(\"Latest stats:\"), tags$div( tags$div( tags$p(\"# Countries:\"), textOutput(outputId = \"outNCountries\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median life exp:\"), textOutput(outputId = \"outMedLifeExp\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median population:\"), textOutput(outputId = \"outMedPop\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median GDP:\"), textOutput(outputId = \"outMedGDP\") ) %>% tagAppendAttributes(class = \"stat-card\") ) %>% tagAppendAttributes(class = \"stat-card-container\"), tags$div( tags$h3(\"Summary stats:\"), tags$div( tags$div( highchartOutput(outputId = \"chartLifeExpByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), tags$div( highchartOutput(outputId = \"chartGDPByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), ) %>% tagAppendAttributes(class = \"base-charts-container\") ) %>% tagAppendAttributes(class = \"card-container\"), tags$div( tags$h3(\"Drilldown:\"), tags$div( highchartOutput(outputId = \"chartDrilldown\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card chart-card-full\") ) %>% tagAppendAttributes(class = \"card-container\"), width = 9 ) %>% tagAppendAttributes(class = \"main-container\") ) ) server <- function(input, output) { data_cards <- reactive({ gapminder %>% filter( 
continent == input$inContinent, year == max(year) ) %>% summarise( nCountries = n_distinct(country), medianLifeExp = median(lifeExp), medianPopM = median(pop / 1e6), medianGDP = median(gdpPercap) ) }) data_charts <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_by(year) %>% summarise( medianLifeExp = round(median(lifeExp), 1), medianGDP = round(median(gdpPercap), 2) ) }) drilldown_chart_base_data <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% group_by(country) %>% summarise( pop = round(pop, 1) ) %>% arrange(desc(pop)) }) drilldown_chart_drilldown_data <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_nest(country) %>% mutate( id = country, type = \"column\", data = map(data, mutate, name = year, y = pop), data = map(data, list_parse) ) }) output$outNCountries <- renderText({ data_cards()$nCountries }) output$outMedLifeExp <- renderText({ paste(round(data_cards()$medianLifeExp, 1), \"years\") }) output$outMedPop <- renderText({ paste0(round(data_cards()$medianPopM, 2), \"M\") }) output$outMedGDP <- renderText({ paste0(\"$\", round(data_cards()$medianGDP, 2)) }) output$chartLifeExpByYear <- renderHighchart({ hchart(data_charts(), \"column\", hcaes(x = year, y = medianLifeExp), color = \"#0198f9\", name = \"Median life expectancy\") |> hc_title(text = \"Median life expectancy by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"Life expectancy\")) }) output$chartGDPByYear <- renderHighchart({ hchart(data_charts(), \"line\", hcaes(x = year, y = medianGDP), color = \"#800000\", name = \"Median GDP\") |> hc_title(text = \"Median GDP by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"GDP\")) }) output$chartDrilldown <- renderHighchart({ hchart( drilldown_chart_base_data(), \"column\", hcaes(x = country, y = pop, drilldown = country), name = \"Population\" ) %>% hc_drilldown( allowPointDrilldown = TRUE, series = list_parse(drilldown_chart_drilldown_data()) ) |> hc_colors(c(\"#004c5f\")) |> hc_title(text = \"Population report\", align = \"left\") |> hc_xAxis(title = list(text = \"\")) |> hc_yAxis(title = list(text = \"Population\")) }) } shinyApp(ui = ui, server = server) and the www/styles.css is here, www/styles.css @import url('https:/s.googleapis.com/css2?family=Poppins:ital,wght@0,700;1,400&display=swap'); * { margin: 0; padding: 0; box-sizing: border-box; } body { -family: 'Poppins', sans-serif; -weight: 400; } .main-container { padding-top: 1rem; } .stat-card-container { display: flex; justify-content: space-between; column-gap: 1rem; } .stat-card { border: 2px solid #f2f2f2; border-bottom: 2px solid #0198f9; width: 100%; padding: 0.5rem 0 0.5rem 1rem; } .stat-card > p { text-transform: uppercase; color: #808080; } .stat-card > div.shiny-text-output { -size: 3rem; -weight: 700; } .card-container { padding-top: 2rem; } .base-charts-container { display: flex; justify-content: space-between; column-gap: 1rem; } .chart-card { border: 2px solid #f2f2f2; width: 50%; } .chart-card-full { width: 100%; } TeX/LaTeX It is most convient to convert Tex/LaTex formulas into MicroSoft Word equtions via pandoc, i.e., pandoc README.md -o README.docx . See https://pandoc.org/ and https://pandoc.org/try/ . 
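A few further pandoc invocations in the same spirit; the file names here are purely illustrative:

```bash
pandoc README.md -o README.docx               # Markdown with LaTeX math to Word
pandoc -s paper.tex -o paper.docx             # a standalone LaTeX file to Word
pandoc -s README.md --mathjax -o README.html  # HTML output with MathJax-rendered formulas
```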
For Chinese language support, try sudo apt-get install texlive-latex-base sudo apt-get install latex-cjk-all sudo apt-get install texlive-latex-extra sudo apt-get install texmaker sudo apt-get install texlive-xetex sudo apt-get install texlive-publishers Now change latex to xelatex from Texmaker. \\documentclass{article} \\usepackage(xeCJK} \\begin{document} How are you?\u4f60\u597d\u5417\uff1f \\LaTeX \\end{document} typescript First, create hello.ts with two lines, #!/usr/bin/env ts-node console.log('Hello world!'); and set up the environment, npm install -g npm npm install typescript ts-node -g chmod +x hello.ts hello.ts Visual Studio Code It is available from https://code.visualstudio.com/download , so we could download a .tar.gz file and unpack. There is a pointer from https://github.com/Microsoft/vscode to https://code.visualstudio.com/Download. Once downloaded, it can be installed with sudo dpkg -i code_1.23.1-1525968403_amd64.deb but it requires libgconf-2-4 ; when failed to install use sudo apt --fix-broken install . See https://code.visualstudio.com/docs/python/python-tutorial for the hello world example. ChatGPT We have export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) , and our first session: import os import openai openai.api_key = os.getenv(\"OPENAI_API_KEY\") if openai.api_key is None: raise ValueError(\"API key not found. Please set the OPENAI_API_KEY environment variable.\") question = input(\"What is your question? \") response = openai.ChatCompletion.create( model=\"gpt-3.5-turbo\", messages=[ {\"role\": \"user\", \"content\": question}, ], max_tokens=512, n=1, stop=None, temperature=0.8, ) print(response) answer = response['choices'][0]['message']['content'] print(\"OpenAI: \" + answer) CodeGPT We need sign up/in for an API key from https://platform.openai.com/overview , e.g., via your MicrsoSoft account. From Visual Studio Code , add extension CodeGPT ; Ctrl-Shit-p to saarch for CodeGPT: Set API Key and enter the key given above from https://platform.openai.com/account/api-keys . Then we create a file, start with a comment, e.g., '# to draw a forest plot' and Ctrl-Shift-i to obtain the code. 
Here is one of the results given, # First, create a dataset with the relevant values for your forest plot study_names <- c(\"Study A\", \"Study B\", \"Study C\", \"Study D\") odds_ratios <- c(1.2, 1.5, 0.8, 0.6) lo_ci <- c(0.9, 1.0, 0.5, 0.3) hi_ci <- c(1.5, 2.0, 1.2, 0.9) # Combine data into a data frame df <- data.frame(study_names, odds_ratios, lo_ci, hi_ci) # Load the 'meta' package for the forest plot function library(meta) # Create the forest plot forest(df$odds_ratios, ci.lb = df$lo_ci, ci.ub = df$hi_ci, slab = df$study_names, xlab = \"Odds Ratio\", main = \"Forest Plot Example\")","title":"LANGUAGES"},{"location":"LANGUAGES/#language-notes","text":"This page collects information on Visual Studio Code, C, C++, Fortran, Java, Perl, python and R.","title":"Language notes"},{"location":"LANGUAGES/#ada","text":"Web: https://www.adaic.org/ From hello.adb , with Ada.Text_IO; procedure Hello is begin Ada.Text_IO.Put_Line (\"Hello, World!\"); end Hello; We run gnatmake hello hello","title":"Ada"},{"location":"LANGUAGES/#basic","text":"It is still possible to reflect the old language from here, https://sourceforge.net/projects/devos-studio/ .","title":"BASIC"},{"location":"LANGUAGES/#c","text":"There have been multiple instances for suggesting migration from tempnam to mkstemp ( /usr/include/stdlib.h under Bash but not Windows) and mktemp -- the following code illustrates its use under both Bash and Windows, Nevertheless these are not standard routines, one still needs to add char *mktemp(char *) for instance. #include #include /* for open flags */ #include /* for PATH_MAX */ int main(void) { static char template[] = \"/tmp/myfileXXXXXX\"; char fname[PATH_MAX]; static char mesg[] = \"Here's lookin' at you, kid!\\n\"; /* beats \"hello, world\" */ int fd; strcpy(fname, template); mktemp(fname); /* RACE CONDITION WINDOW OPENS */ printf(\"Filename is %s\\n\", fname); /* RACE CONDITION WINDOW LASTS TO HERE */ fd = open(fname, O_CREAT|O_RDWR|O_TRUNC, 0600); write(fd, mesg, strlen(mesg)); close(fd); /* unlink(fname); */ return 0; } A script for testing UTF-8 support by PCRE, #include #include #include int main() { int supports_utf8; if (pcre_config (PCRE_CONFIG_UTF8, &supports_utf8)) { fprintf(stderr, \"pcre_config() failed\\n\"); exit(EXIT_FAILURE); } printf(\"UTF-8 is supported: %s\\n\", supports_utf8 ? \"yes\" : \"no\"); exit(EXIT_SUCCESS); } // gcc $(pkg-config --cflags --libs libpcre) pcreutf.c // ./a.out // pcretest -C The following is Timsort implementation, #include #define MIN_RUN 32 // \u63d2\u5165\u6392\u5e8f\u7b97\u6cd5 void insertionSort(int arr[], int left, int right) { for (int i = left + 1; i <= right; i++) { int key = arr[i]; int j = i - 1; while (j >= left && arr[j] > key) { arr[j + 1] = arr[j]; j--; } arr[j + 1] = key; } } // \u5f52\u5e76\u51fd\u6570 void merge(int arr[], int left, int mid, int right) { int len1 = mid - left + 1; int len2 = right - mid; int L[len1], R[len2]; for (int i = 0; i < len1; i++) L[i] = arr[left + i]; for (int j = 0; j < len2; j++) R[j] = arr[mid + 1 + j]; int i = 0, j = 0, k = left; while (i < len1 && j < len2) { if (L[i] <= R[j]) arr[k++] = L[i++]; else arr[k++] = R[j++]; } while (i < len1) arr[k++] = L[i++]; while (j < len2) arr[k++] = R[j++]; } // Timsort \u7b97\u6cd5 void timSort(int arr[], int n) { for (int i = 0; i < n; i += MIN_RUN) insertionSort(arr, i, (i + MIN_RUN - 1) < n ? 
(i + MIN_RUN - 1) : (n - 1)); for (int size = MIN_RUN; size < n; size *= 2) { for (int left = 0; left < n; left += 2 * size) { int mid = left + size - 1; int right = (left + 2 * size - 1) < (n - 1) ? (left + 2 * size - 1) : (n - 1); merge(arr, left, mid, right); } } } int main() { int arr[] = {12, 11, 13, 5, 6, 7}; int n = sizeof(arr) / sizeof(arr[0]); printf(\"Original array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); timSort(arr, n); printf(\"\\nSorted array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); return 0; } y gcc timsort.c -o timsort and timsort to get Original array: 12 11 13 5 6 7 Sorted array: 5 6 7 11 12 13 14:40","title":"C"},{"location":"LANGUAGES/#c_1","text":"The use of Google Test is noted here, Web: https://github.com/google/googletest . wget -qO- https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz | \\ tar xvfz - cd googletest-release-1.11.0 mkdir build && cd build build .. make # amending set(CMAKE_INSTALL_PREFIX \"/rds/user/jhz22/hpc-work\") in `cmake_install.cmake` make install Now it is possible to compile R/glmnet 4.1-3, i.e., find_package(GTest 1.11 CONFIG REQUIRED) of src/glmnetpp/CMakeLists.txt .","title":"C++"},{"location":"LANGUAGES/#fortran","text":"Information on modernising Fortran could be very useful in foreign language calls (e.g., R), http://fortranwiki.org/fortran/show/Modernizing+Old+Fortran. Debugging Fortran code gdb https://undo.io/resources/debugging-fortran-code-gdb/ valgrind program segfault1 implicit none real, dimension(10) :: a integer :: i a = 0. do i = 1, 12 a(i) = i print*,a(i) end do end program segfault1 ! gfortran -g -Wall -Wextra -Wimplicit-interface -fPIC -fmax-errors=1 -fcheck=all -fbacktrace segfault1.f90 -o segfault1 ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 ! MacOS --dsymutil=yes: ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1","title":"Fortran"},{"location":"LANGUAGES/#java","text":"The IDE of choice is NetBeans (e.g., DEPICT and JAM); however 8.1 from apt install under Ubuntu 18.04 crashes so it is suggested to download directly from https://netbeans.org/downloads/. To enable JDK it is helpful to specify --javahome option. sudo ./netbeans-8.2-linux.sh --javahome /usr/lib/jvm/java-8-oracale or start with netbeans --javahome /usr/lib/jvm/java-8-oracle (more convenient to set alias netbeans='netbeans --javahome /usr/lib/jvm/java-8-oracle' at .bashrc ). NetBeans 9.0 is currently available from https://netbeans.apache.org/download/nb90/; the .zip file can be downloaded and unpacked for use. For software such as cutadapt cython is required, sudo apt install cython","title":"Java"},{"location":"LANGUAGES/#javascript","text":"A current JavaScript/TypeScript interpreter is deno, https://anaconda.org/conda-forge/deno/files . wget https://anaconda.org/conda-forge/deno/1.40.2/download/linux-64/deno-1.40.2-hfc7925d_0.conda -O deno.conda unzip deno.conda tar --use-compress-program=unzstd -xvf pkg-deno-1.40.2-hfc7925d_0.tar.zst deno --version giving deno 1.40.2 (release, x86_64-unknown-linux-gnu) v8 12.1.285.6 typescript 5.3.3 The mermaid diagram is illustrated with mermaid.html using code available from here, https://cdnjs.cloudflare.com/ajax/libs/mermaid/8.0.0/mermaid.min.js . The call can be embedded in markdown document, The hello world example with plotly.js is https://plot.ly/javascript/getting-started/#hello-world-example and the 3D diagram is with 3d-scatter.html based on https://plot.ly/javascript/3d-scatter-plots/ . 
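Returning to deno, the unpacked binary can be smoke-tested without writing any file, assuming it sits in the current directory as extracted above:

```bash
./deno --version                                 # confirm the unpacked binary runs
./deno eval "console.log('Hello from deno')"     # execute a one-line script directly
```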
The base64 encode/decode is with https://www.base64encode.org/ & https://www.base64decode.org/ .","title":"JavaScript"},{"location":"LANGUAGES/#perl","text":"sudo perl -MCPAN -e shell install DBI for instance, as used in VEP . Another notable example is circos, http://circos.ca and its Google group , wget -qO- http://www.circos.ca/distribution/circos-current.tgz | \\ tar xvfz - cd circos-0.69-9 bin/circos --modules wget -qO- http://circos.ca/distribution/circos-tutorials-current.tgz | \\ tar xvfz - wget -qO- http://www.circos.ca/distribution/circos-tools-current.tgz | \\ tar xvfz - The following required modules can be installed Config::General (v2.50 or later) Font::TTF GD List::MoreUtils Math::Bezier Math::Round Math::VecStat Params::Validate Readonly Regexp::Common Set::IntSpan (v1.16 or later) Text::Format and we can enter the example/ directory to run its script. The CircosAPI module requires namespace::autoclean , Moose , JSON::PP and String::Util .","title":"Perl"},{"location":"LANGUAGES/#python","text":"To disable upgrade of pip, add [global] disable-pip-version-check = True option to $HOME/.config/pip/pip.conf To install a particular version of package, e.g., sudo -H pip install pandas==0.20.1 which is required by DEPICT's munge_sumstats.py . Other pip options include uninstall . The python programs in agotron_detector requires MySQL and can be installed as follows, sudo apt-get install python-dev libmysqlclient-dev sudo pip install MySQL-python It is necessary to use --user option without super-user privilege. PyStan is available with pip install pystan which uses matplotlib, https://github.com/matplotlib and Tkinter, established with sudo apt install python-tk or sudo apt install python3-tk . import pystan schools_code = \"\"\" data { int J; // number of schools real y[J]; // estimated treatment effects real sigma[J]; // s.e. of effect estimates } parameters { real mu; real tau; real eta[J]; } transformed parameters { real theta[J]; for (j in 1:J) theta[j] = mu + tau * eta[j]; } model { eta ~ normal(0, 1); y ~ normal(theta, sigma); } \"\"\" schools_dat = {'J': 8, 'y': [28, 8, -3, 7, -1, 1, 18, 12], 'sigma': [15, 10, 16, 11, 9, 11, 10, 18]} sm = pystan.StanModel(model_code=schools_code) fit = sm.sampling(data=schools_dat, iter=1000, chains=4) import matplotlib.pyplot as plt def plotGraph(): fig = fit.plot() # plt.show() # use the save button or the following command, # f.savefig(\"foo.pdf\", bbox_inches='tight') return fig from matplotlib.backends.backend_pdf import PdfPages pp = PdfPages('foo.pdf') f = plotGraph() pp.savefig(f) pp.close() To install jupyter-book, module load python/2.7.10 python -m pip install jupyter-book --user and we can check for $HOME/.local/lib/python2.7/site-packages and start from /home/jhz22/.local/bin. We can install notebook similarly. 
python -m pip install notebook --user Owing to recent changes, it is more appropriate to use python3 module load python/3.5 export PATH=$PATH:$HOME/.local/bin export PYTHONPATH=/usr/local/Cluster-Apps/python/3.5.1/lib/python3.5/site-packages:$HOME/.local/lib/python3.5/site-packages python3 -m pip install jupyter-book --user To convert from parquet to csv is done as follows, import pandas as pd import pyspark import pyarrow import sys import os fn = sys.argv[1] print(fn) df = pd.read_parquet(fn) outfn = \"\".join(\"GTEx_Analysis_v8_EUR_eQTL_all_associations_csv/\" + os.path.splitext(os.path.basename(fn))[0] + \".csv\") print(outfn) df.to_csv(outfn)","title":"Python"},{"location":"LANGUAGES/#r","text":"Information on R and RStudio can be seen from installation section of this, https://jinghuazhao.github.io/Computational-Statistics/INSTALL/ . The use of multi-byte string needs specific handling, e.g., # on Bash iconv myfile -f UTF-8 -t ISO-8859-1 -c and # in R Sys.setlocale(\"LC_ALL\", \"C\") See https://stackoverflow.com/questions/4993837/r-invalid-multibyte-string plotly It requires a number of software, sudo dnf install udunits2-devel sudo dnf install cairo-devel sudo dnf install gdal gdal-devel sudo dnf install proj-devel proj-static sudo dnf install geos geos-devel to be followed by install.packages(\"plotly\",depend=TRUE,repos=\"https://cran.r-project.org\") Calls from R Basic examples using OpenMP with R, for C, C++, F77, and Fortran 2003 using Romp, https://github.com/wrathematics/Romp RFI: R to Modern Fortran Interface, https://github.com/t-kalinowski/RFI Stanford Utility Tools for R packages using Fortran, https://bnaras.github.io/SUtools/articles/SUtools.html Package examples for Fortran, * https://cran.r-project.org/web/packages/Delaporte/index.html. * https://cran.r-project.org/web/packages/spam/index.html * https://cran.r-project.org/web/packages/spam64/index.html Documentation * https://www.avrahamadler.com/2018/12/09/the-need-for-speed-part-1-building-an-r-package-with-fortran/ * https://www.avrahamadler.com/2018/12/23/the-need-for-speed-part-2-c-vs-fortran-vs-c/ * https://www.sciencedirect.com/science/article/pii/S2352711018300785?via%3Dihub R packages See https://r-pkgs.org/index.html .","title":"R"},{"location":"LANGUAGES/#shinyapps","text":"Web: https://www.shinyapps.io/ , Shiny examples The hello world version is as follows, library(shiny) ui <- fluidPage( \"Hello, world!\" ) server <- function(input, output, session) { } shinyApp(ui, server) Suppose our a directory (called shinyapps here) contains files ui.R and server.R (or combined in app.R ). Go the web site, and register an account with email address. Login from https://www.shinyapps.io/admin/#/login and the following information is available:","title":"shinyapps"},{"location":"LANGUAGES/#step-1-install-rsconnect","text":"The rsconnect package can be installed directly from CRAN. To make sure you have the latest version run following code in your R console: install.packages('rsconnect')","title":"Step 1 \u2013 Install rsconnect"},{"location":"LANGUAGES/#step-2-authorize-account","text":"The rsconnect package must be authorized to your account using a token and secret. To do this, click the copy button below and we'll copy the whole command you need to your clipboard. Just paste it into your console to authorize your account. Once you've entered the command successfully in R, that computer is now authorized to deploy applications to your shinyapps.io account. 
rsconnect::setAccountInfo(name='your-account', token='your token', secret='your secret') In the future, you can manage your tokens from the Tokens page the settings menu.","title":"Step 2 \u2013 Authorize Account"},{"location":"LANGUAGES/#step-3-deploy","text":"Once the rsconnect package has been configured, you're ready to deploy your first application. If you haven't written any applications yet, you can also checkout the Getting Started Guide for instructions on how to deploy our demo application. Run the following code in your R console. library(rsconnect) rsconnect::deployApp('path/to/your/app') The shiny page is then up as https://your-account.shinyapps.io/shinyapps/ . A more sophisticated Dashboard using the Gapminer dataset is copied here from R-bloggers. library(shiny) library(dplyr) library(purrr) library(gapminder) library(highcharter) ui <- fluidPage( tags$head( tags$link(rel = \"stylesheet\", type = \"text/css\", href = \"styles.css\") ), sidebarLayout( sidebarPanel( titlePanel(\"R Shiny Highcharts\"), selectInput( inputId = \"inContinent\", label = \"Continent:\", choices = unique(gapminder$continent), selected = \"Europe\" ), selectInput( inputId = \"inYearMin\", label = \"Start year:\", choices = unique(gapminder$year)[1:length(unique(gapminder$year)) - 1], selected = min(gapminder$year) ), selectInput( inputId = \"inYearMax\", label = \"End year:\", choices = unique(gapminder$year)[2:length(unique(gapminder$year))], selected = max(gapminder$year) ), width = 3 ), mainPanel( tags$h3(\"Latest stats:\"), tags$div( tags$div( tags$p(\"# Countries:\"), textOutput(outputId = \"outNCountries\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median life exp:\"), textOutput(outputId = \"outMedLifeExp\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median population:\"), textOutput(outputId = \"outMedPop\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median GDP:\"), textOutput(outputId = \"outMedGDP\") ) %>% tagAppendAttributes(class = \"stat-card\") ) %>% tagAppendAttributes(class = \"stat-card-container\"), tags$div( tags$h3(\"Summary stats:\"), tags$div( tags$div( highchartOutput(outputId = \"chartLifeExpByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), tags$div( highchartOutput(outputId = \"chartGDPByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), ) %>% tagAppendAttributes(class = \"base-charts-container\") ) %>% tagAppendAttributes(class = \"card-container\"), tags$div( tags$h3(\"Drilldown:\"), tags$div( highchartOutput(outputId = \"chartDrilldown\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card chart-card-full\") ) %>% tagAppendAttributes(class = \"card-container\"), width = 9 ) %>% tagAppendAttributes(class = \"main-container\") ) ) server <- function(input, output) { data_cards <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% summarise( nCountries = n_distinct(country), medianLifeExp = median(lifeExp), medianPopM = median(pop / 1e6), medianGDP = median(gdpPercap) ) }) data_charts <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_by(year) %>% summarise( medianLifeExp = round(median(lifeExp), 1), medianGDP = round(median(gdpPercap), 2) ) }) drilldown_chart_base_data <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% group_by(country) %>% summarise( pop = 
round(pop, 1) ) %>% arrange(desc(pop)) }) drilldown_chart_drilldown_data <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_nest(country) %>% mutate( id = country, type = \"column\", data = map(data, mutate, name = year, y = pop), data = map(data, list_parse) ) }) output$outNCountries <- renderText({ data_cards()$nCountries }) output$outMedLifeExp <- renderText({ paste(round(data_cards()$medianLifeExp, 1), \"years\") }) output$outMedPop <- renderText({ paste0(round(data_cards()$medianPopM, 2), \"M\") }) output$outMedGDP <- renderText({ paste0(\"$\", round(data_cards()$medianGDP, 2)) }) output$chartLifeExpByYear <- renderHighchart({ hchart(data_charts(), \"column\", hcaes(x = year, y = medianLifeExp), color = \"#0198f9\", name = \"Median life expectancy\") |> hc_title(text = \"Median life expectancy by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"Life expectancy\")) }) output$chartGDPByYear <- renderHighchart({ hchart(data_charts(), \"line\", hcaes(x = year, y = medianGDP), color = \"#800000\", name = \"Median GDP\") |> hc_title(text = \"Median GDP by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"GDP\")) }) output$chartDrilldown <- renderHighchart({ hchart( drilldown_chart_base_data(), \"column\", hcaes(x = country, y = pop, drilldown = country), name = \"Population\" ) %>% hc_drilldown( allowPointDrilldown = TRUE, series = list_parse(drilldown_chart_drilldown_data()) ) |> hc_colors(c(\"#004c5f\")) |> hc_title(text = \"Population report\", align = \"left\") |> hc_xAxis(title = list(text = \"\")) |> hc_yAxis(title = list(text = \"Population\")) }) } shinyApp(ui = ui, server = server) and the www/styles.css is here, www/styles.css @import url('https:/s.googleapis.com/css2?family=Poppins:ital,wght@0,700;1,400&display=swap'); * { margin: 0; padding: 0; box-sizing: border-box; } body { -family: 'Poppins', sans-serif; -weight: 400; } .main-container { padding-top: 1rem; } .stat-card-container { display: flex; justify-content: space-between; column-gap: 1rem; } .stat-card { border: 2px solid #f2f2f2; border-bottom: 2px solid #0198f9; width: 100%; padding: 0.5rem 0 0.5rem 1rem; } .stat-card > p { text-transform: uppercase; color: #808080; } .stat-card > div.shiny-text-output { -size: 3rem; -weight: 700; } .card-container { padding-top: 2rem; } .base-charts-container { display: flex; justify-content: space-between; column-gap: 1rem; } .chart-card { border: 2px solid #f2f2f2; width: 50%; } .chart-card-full { width: 100%; }","title":"Step 3 \u2013 Deploy"},{"location":"LANGUAGES/#texlatex","text":"It is most convient to convert Tex/LaTex formulas into MicroSoft Word equtions via pandoc, i.e., pandoc README.md -o README.docx . See https://pandoc.org/ and https://pandoc.org/try/ . For Chinese language support, try sudo apt-get install texlive-latex-base sudo apt-get install latex-cjk-all sudo apt-get install texlive-latex-extra sudo apt-get install texmaker sudo apt-get install texlive-xetex sudo apt-get install texlive-publishers Now change latex to xelatex from Texmaker. 
\\documentclass{article} \\usepackage(xeCJK} \\begin{document} How are you?\u4f60\u597d\u5417\uff1f \\LaTeX \\end{document}","title":"TeX/LaTeX"},{"location":"LANGUAGES/#typescript","text":"First, create hello.ts with two lines, #!/usr/bin/env ts-node console.log('Hello world!'); and set up the environment, npm install -g npm npm install typescript ts-node -g chmod +x hello.ts hello.ts","title":"typescript"},{"location":"LANGUAGES/#visual-studio-code","text":"It is available from https://code.visualstudio.com/download , so we could download a .tar.gz file and unpack. There is a pointer from https://github.com/Microsoft/vscode to https://code.visualstudio.com/Download. Once downloaded, it can be installed with sudo dpkg -i code_1.23.1-1525968403_amd64.deb but it requires libgconf-2-4 ; when failed to install use sudo apt --fix-broken install . See https://code.visualstudio.com/docs/python/python-tutorial for the hello world example.","title":"Visual Studio Code"},{"location":"LANGUAGES/#chatgpt","text":"We have export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) , and our first session: import os import openai openai.api_key = os.getenv(\"OPENAI_API_KEY\") if openai.api_key is None: raise ValueError(\"API key not found. Please set the OPENAI_API_KEY environment variable.\") question = input(\"What is your question? \") response = openai.ChatCompletion.create( model=\"gpt-3.5-turbo\", messages=[ {\"role\": \"user\", \"content\": question}, ], max_tokens=512, n=1, stop=None, temperature=0.8, ) print(response) answer = response['choices'][0]['message']['content'] print(\"OpenAI: \" + answer)","title":"ChatGPT"},{"location":"LANGUAGES/#codegpt","text":"We need sign up/in for an API key from https://platform.openai.com/overview , e.g., via your MicrsoSoft account. From Visual Studio Code , add extension CodeGPT ; Ctrl-Shit-p to saarch for CodeGPT: Set API Key and enter the key given above from https://platform.openai.com/account/api-keys . Then we create a file, start with a comment, e.g., '# to draw a forest plot' and Ctrl-Shift-i to obtain the code. Here is one of the results given, # First, create a dataset with the relevant values for your forest plot study_names <- c(\"Study A\", \"Study B\", \"Study C\", \"Study D\") odds_ratios <- c(1.2, 1.5, 0.8, 0.6) lo_ci <- c(0.9, 1.0, 0.5, 0.3) hi_ci <- c(1.5, 2.0, 1.2, 0.9) # Combine data into a data frame df <- data.frame(study_names, odds_ratios, lo_ci, hi_ci) # Load the 'meta' package for the forest plot function library(meta) # Create the forest plot forest(df$odds_ratios, ci.lb = df$lo_ci, ci.ub = df$hi_ci, slab = df$study_names, xlab = \"Odds Ratio\", main = \"Forest Plot Example\")","title":"CodeGPT"},{"location":"PARALLEL/","text":"Parallel computing Recent notes are available from https://cambridge-ceu.github.io/csd3/systems/ParallelComputing.html . GNU parallel It has home https://www.gnu.org/software/parallel/ -- note especially with its --env to pass environment variables. Under Ubuntu, GNU parallel is easily installed as follows, sudo apt install parallel Earlier version had issues with temporary direvtory, e.g., https://stackoverflow.com/questions/24398941/gnu-parallel-unlink-error if module load parallel/20131222 The latest, http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 , can be used instead. SGE Sun Grid Engine has a wiki entry . https://peteris.rocks/blog/sun-grid-engine-installation-on-ubuntu-server/ . 
To delete SGE jobs shown in qstat, use qstat | grep $USER | cut -d\" \" -f1 | xargs qdel Otherwise for a consecutive sequence we use qdel {id1..id2}. SLURM Under Ubuntu, it can be installed with sudo apt install slurm-client General information is available from https://slurm.schedmd.com/ . Job scheduling examples on CentOS 6 and RHEL 7, https://www.arc.ox.ac.uk/arc-systems-0 . command description sacct report job accounting information about active or completed jobs salloc allocate resources for a job in real time (typically used to allocate resources and spawn a shell, in which the srun command is used to launch parallel tasks) sbatch submit a job script for later execution (the script typically contains one or more srun commands to launch parallel tasks) scancel cancel a pending or running job scontrol hold, holdu, release, requeue, requeuehold, suspend and resume commands sinfo reports the state of partitions and nodes managed by Slurm (it has a variety of filtering, sorting, and formatting options) squeue reports the state of jobs (it has a variety of filtering, sorting, and formatting options), by default, reports the running jobs in priority order followed by the pending jobs in priority order srun used to submit a job for execution in real time e.g., squeue -u $USER -r; qstat -u $USER; also scontrol show config; scontrol show partition; scontrol show job [jobid] and sview To see environmental variables, e.g., MaxArraySize, we use scontrol show config | sed -n '/^MaxArraySize/s/.*= *//p' job array, https://slurm.schedmd.com/job_array.html dependency, https://hpc.nih.gov/docs/job_dependencies.html examples, >https://github.com/statgen/SLURM-examples> temporary directories, https://help.rc.ufl.edu/doc/Temporary_Directories When a SLURM job starts, the scheduler creates a temporary directory for the job on the compute node's local hard drive. This $SLURM_TMPDIR directory is very useful for jobs that need to use or generate a large number of small files, as the /ufrc parallel filesystem is optimized for large file streaming and is less suitable for small files. The directory is owned by the user running the job. The path to the temporary directory is made available as the $SLURM_TMPDIR variable. At the end of the job, the temporary directory is automatically removed. You can use the ${SLURM_TMPDIR} variable in job scripts to copy temporary data to the temporary job directory. If necessary, it can also be used as argument for applications that accept a temporary directory argument. Many applications and programming languages use the $TMPDIR environment variable, if available, as the default temporary directory path. If this variable is not set, the applications will default to using the /tmp directory, which is not desirable. SLURM will set $TMPDIR to the same value as $SLURM_TMPDIR unless $TMPDIR has already been set, in which case it will be ignored. Check your job script(s) and shell initialization files like .bashrc and .bash_profile to make sure you do not have $TMPDIR set. If a personal Singularity container is used, make sure that the $SINGULARITYENV_TMPDIR variable is set within the job to export the local scratch location into the Singularity container. Examples of an interactive session can be simply sintr , or srun -N1 -n1 -c6 -p short,medium,long -t 12:0:0 --pty bash -i so that the earliest available partition will be used. SGE to SLURM Conversion is documented at https://srcc.stanford.edu/sge-slurm-conversion . 
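As a minimal sketch of such a conversion, the SLURM directives below mirror the common SGE ones; the resource values are illustrative only:

```bash
#!/bin/bash
#SBATCH --job-name=demo           # SGE: #$ -N demo
#SBATCH --time=01:00:00           # SGE: #$ -l h_rt=01:00:00
#SBATCH --array=1-10              # SGE: #$ -t 1-10
#SBATCH --output=demo_%A_%a.out   # SGE: #$ -o demo.$JOB_ID.$TASK_ID
echo "running task ${SLURM_ARRAY_TASK_ID}"   # SGE scripts use ${SGE_TASK_ID} instead
```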
EXAMPLES We intended to convert a large number of PDF files (INTERVAL.*.manhattn.pdf) to PNG with smaller file sizes. To start, we build a file list, ls *pdf | \\ sed 's/INTERVAL.//g;s/.manhattan.pdf//g' > INTERVAL.list We do this with GNU parallel as follows, cat INTERVAL.list | \\ parallel -C' ' ' echo {} pdftopng -r 300 INTERVAL.{}.manhattan.pdf mv {}-000001.png INTERVAL.{}.png ' or with SLURM, #!/bin/bash #SBATCH --ntasks=1 #SBATCH --job-name=pdftopng #SBATCH --time=6:00:00 #SBATCH --cpus-per-task=8 #SBATCH --partition=short #SBATCH --array=1-50 #SBATCH --output=work/pdftopng_%A_%a.out #SBATCH --error=work/pdftopng_%A_%a.err #SBATCH --export ALL . /etc/profile.d/modules.sh module load default-cardio module load slurm module load use.own export p=$(awk 'NR==ENVIRON[\"SLURM_ARRAY_TASK_ID\"]' INTERVAL.list) export TMPDIR=/scratch/jhz22/tmp echo ${p} pdftopng -r 300 INTERVAL.${p}.manhattan.pdf ${p} mv ${p}-000001.png INTERVAL.${p}.png This is a single parameter case and it is possible to allow for more parameters in both cases. Note also that the option --array=1-50 instructs the system to schedule jobs and in jobs with large memory usage it is more preferable to change to --array 1-50%4 so that a maximum of four jobs will be run simultaneously. Job scheduling echo \"ls -l\" | at 01:00 crontab.guru, https://crontab.guru/examples.html","title":"PARALLEL"},{"location":"PARALLEL/#parallel-computing","text":"Recent notes are available from https://cambridge-ceu.github.io/csd3/systems/ParallelComputing.html .","title":"Parallel computing"},{"location":"PARALLEL/#gnu-parallel","text":"It has home https://www.gnu.org/software/parallel/ -- note especially with its --env to pass environment variables. Under Ubuntu, GNU parallel is easily installed as follows, sudo apt install parallel Earlier version had issues with temporary direvtory, e.g., https://stackoverflow.com/questions/24398941/gnu-parallel-unlink-error if module load parallel/20131222 The latest, http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 , can be used instead.","title":"GNU parallel"},{"location":"PARALLEL/#sge","text":"Sun Grid Engine has a wiki entry . https://peteris.rocks/blog/sun-grid-engine-installation-on-ubuntu-server/ . To delete SGE jobs shown in qstat, use qstat | grep $USER | cut -d\" \" -f1 | xargs qdel Otherwise for a consecutive sequence we use qdel {id1..id2}.","title":"SGE"},{"location":"PARALLEL/#slurm","text":"Under Ubuntu, it can be installed with sudo apt install slurm-client General information is available from https://slurm.schedmd.com/ . Job scheduling examples on CentOS 6 and RHEL 7, https://www.arc.ox.ac.uk/arc-systems-0 . 
command description sacct report job accounting information about active or completed jobs salloc allocate resources for a job in real time (typically used to allocate resources and spawn a shell, in which the srun command is used to launch parallel tasks) sbatch submit a job script for later execution (the script typically contains one or more srun commands to launch parallel tasks) scancel cancel a pending or running job scontrol hold, holdu, release, requeue, requeuehold, suspend and resume commands sinfo reports the state of partitions and nodes managed by Slurm (it has a variety of filtering, sorting, and formatting options) squeue reports the state of jobs (it has a variety of filtering, sorting, and formatting options), by default, reports the running jobs in priority order followed by the pending jobs in priority order srun used to submit a job for execution in real time e.g., squeue -u $USER -r; qstat -u $USER; also scontrol show config; scontrol show partition; scontrol show job [jobid] and sview To see environmental variables, e.g., MaxArraySize, we use scontrol show config | sed -n '/^MaxArraySize/s/.*= *//p' job array, https://slurm.schedmd.com/job_array.html dependency, https://hpc.nih.gov/docs/job_dependencies.html examples, >https://github.com/statgen/SLURM-examples> temporary directories, https://help.rc.ufl.edu/doc/Temporary_Directories When a SLURM job starts, the scheduler creates a temporary directory for the job on the compute node's local hard drive. This $SLURM_TMPDIR directory is very useful for jobs that need to use or generate a large number of small files, as the /ufrc parallel filesystem is optimized for large file streaming and is less suitable for small files. The directory is owned by the user running the job. The path to the temporary directory is made available as the $SLURM_TMPDIR variable. At the end of the job, the temporary directory is automatically removed. You can use the ${SLURM_TMPDIR} variable in job scripts to copy temporary data to the temporary job directory. If necessary, it can also be used as argument for applications that accept a temporary directory argument. Many applications and programming languages use the $TMPDIR environment variable, if available, as the default temporary directory path. If this variable is not set, the applications will default to using the /tmp directory, which is not desirable. SLURM will set $TMPDIR to the same value as $SLURM_TMPDIR unless $TMPDIR has already been set, in which case it will be ignored. Check your job script(s) and shell initialization files like .bashrc and .bash_profile to make sure you do not have $TMPDIR set. If a personal Singularity container is used, make sure that the $SINGULARITYENV_TMPDIR variable is set within the job to export the local scratch location into the Singularity container. Examples of an interactive session can be simply sintr , or srun -N1 -n1 -c6 -p short,medium,long -t 12:0:0 --pty bash -i so that the earliest available partition will be used.","title":"SLURM"},{"location":"PARALLEL/#sge-to-slurm","text":"Conversion is documented at https://srcc.stanford.edu/sge-slurm-conversion .","title":"SGE to SLURM"},{"location":"PARALLEL/#examples","text":"We intended to convert a large number of PDF files (INTERVAL.*.manhattn.pdf) to PNG with smaller file sizes. 
To start, we build a file list, ls *pdf | \\ sed 's/INTERVAL.//g;s/.manhattan.pdf//g' > INTERVAL.list We do this with GNU parallel as follows, cat INTERVAL.list | \\ parallel -C' ' ' echo {} pdftopng -r 300 INTERVAL.{}.manhattan.pdf mv {}-000001.png INTERVAL.{}.png ' or with SLURM, #!/bin/bash #SBATCH --ntasks=1 #SBATCH --job-name=pdftopng #SBATCH --time=6:00:00 #SBATCH --cpus-per-task=8 #SBATCH --partition=short #SBATCH --array=1-50 #SBATCH --output=work/pdftopng_%A_%a.out #SBATCH --error=work/pdftopng_%A_%a.err #SBATCH --export ALL . /etc/profile.d/modules.sh module load default-cardio module load slurm module load use.own export p=$(awk 'NR==ENVIRON[\"SLURM_ARRAY_TASK_ID\"]' INTERVAL.list) export TMPDIR=/scratch/jhz22/tmp echo ${p} pdftopng -r 300 INTERVAL.${p}.manhattan.pdf ${p} mv ${p}-000001.png INTERVAL.${p}.png This is a single parameter case and it is possible to allow for more parameters in both cases. Note also that the option --array=1-50 instructs the system to schedule jobs and in jobs with large memory usage it is more preferable to change to --array 1-50%4 so that a maximum of four jobs will be run simultaneously.","title":"EXAMPLES"},{"location":"PARALLEL/#job-scheduling","text":"echo \"ls -l\" | at 01:00 crontab.guru, https://crontab.guru/examples.html","title":"Job scheduling"},{"location":"REPRODUCE/","text":"Reproducible research a.k.a Literate programming. bookdown . CWEB . Jupyter . knitr . noweb and its Tufts site . Pweave and ptangle . Sweave and Stangle . An attempt to model reproducibility is Conceptualizing Reproducibility Using Simulations and Theory ( CRUST ). bookmarks for PDF To install for Ubuntu, sudo snap install pdftk # version 2.02-4, or sudo apt install pdftk-java # version 3.2.2-1 and for Fedora, sudo dnf install pdftk-java . pdftk src.pdf dump_data output bookmarks.txt pdftk target.pdf update_info bookmarks.txt output target-bm.pdf quarto This is extensively documented under Linux, https://cambridge-ceu.github.io/csd3/applications/quarto.html . Under Windows, however it is simpler which involves these steps Install R, e.g., R-4.3.1 Optionally, one installs RStudio or Visual Studio Code (Extension Quarto available, ctrl-+/- to enlarge/shrink fonts). Install python from https://www.python.org/downloads/ rem Program files\\quarto\\bin\\tools deno upgrade py -m pip install tinytex py -m pip install jupyter py -m pip install numpy py -m pip install matplotlib Install quarto, e.g., quato-1.3.450, from https://quarto.org Now we intend to render matplotlib.qmd adapted from https://quarto.org , rendered by quarto render matplotlib.qmd . --- title: \"matplotlib demo\" format: html: code-fold: true jupyter: python3 --- For a demonstration of a line plot on a polar axis, see @fig-polar. ```{python} #| label: fig-polar #| fig-cap: \"A line plot on a polar axis\" import numpy as np import matplotlib.pyplot as plt r = np.arange(0, 2, 0.01) theta = 2 * np.pi * r fig, ax = plt.subplots( subplot_kw = {'projection': 'polar'} ) ax.plot(theta, r) ax.set_rticks([0.5, 1, 1.5, 2]) ax.grid(True) plt.show() ``` Reference Devezer B, Nardin LG, Baumgaertner B, Buzbas EO. Scientific discovery in a model-centric framework: Reproducibility, innovation, and epistemic diversity. PLoS One . 2019 May 15;14(5):e0216125. doi: 10.1371/journal.pone.0216125. eCollection 2019.","title":"REPRODUCE"},{"location":"REPRODUCE/#reproducible-research","text":"a.k.a Literate programming. bookdown . CWEB . Jupyter . knitr . noweb and its Tufts site . Pweave and ptangle . Sweave and Stangle . 
An attempt to model reproducibility is Conceptualizing Reproducibility Using Simulations and Theory ( CRUST ).","title":"Reproducible research"},{"location":"REPRODUCE/#bookmarks-for-pdf","text":"To install for Ubuntu, sudo snap install pdftk # version 2.02-4, or sudo apt install pdftk-java # version 3.2.2-1 and for Fedora, sudo dnf install pdftk-java . pdftk src.pdf dump_data output bookmarks.txt pdftk target.pdf update_info bookmarks.txt output target-bm.pdf","title":"bookmarks for PDF"},{"location":"REPRODUCE/#quarto","text":"This is extensively documented under Linux, https://cambridge-ceu.github.io/csd3/applications/quarto.html . Under Windows, however it is simpler which involves these steps Install R, e.g., R-4.3.1 Optionally, one installs RStudio or Visual Studio Code (Extension Quarto available, ctrl-+/- to enlarge/shrink fonts). Install python from https://www.python.org/downloads/ rem Program files\\quarto\\bin\\tools deno upgrade py -m pip install tinytex py -m pip install jupyter py -m pip install numpy py -m pip install matplotlib Install quarto, e.g., quato-1.3.450, from https://quarto.org Now we intend to render matplotlib.qmd adapted from https://quarto.org , rendered by quarto render matplotlib.qmd . --- title: \"matplotlib demo\" format: html: code-fold: true jupyter: python3 --- For a demonstration of a line plot on a polar axis, see @fig-polar. ```{python} #| label: fig-polar #| fig-cap: \"A line plot on a polar axis\" import numpy as np import matplotlib.pyplot as plt r = np.arange(0, 2, 0.01) theta = 2 * np.pi * r fig, ax = plt.subplots( subplot_kw = {'projection': 'polar'} ) ax.plot(theta, r) ax.set_rticks([0.5, 1, 1.5, 2]) ax.grid(True) plt.show() ```","title":"quarto"},{"location":"REPRODUCE/#reference","text":"Devezer B, Nardin LG, Baumgaertner B, Buzbas EO. Scientific discovery in a model-centric framework: Reproducibility, innovation, and epistemic diversity. PLoS One . 2019 May 15;14(5):e0216125. doi: 10.1371/journal.pone.0216125. eCollection 2019.","title":"Reference"},{"location":"SYSTEMS/","text":"Systems This is a skeleton to list items to be detailed in the near future. FreeDOS and Linux FreeDOS is available from http://www.freedos.org/ . FreeDOS is an open source DOS-compatible operating system that you can use to play classic DOS games, run legacy business software, or develop embedded systems. Any program that works on MS-DOS should also run on FreeDOS. It is notable that v1.3 provides liveCD and liteUSB which could be useful. Here describes how to convert VMDK format to iso, https://www.ilovefreesoftware.com/26/featured/how-to-convert-vmdk-to-iso-in-windows.html , e.g., qemu-img convert -f vmdk FD13LITE.VMDK pd.raw dd if=pd.raw of=pd.iso We can then use rufus, https://rufus.ie/ , to generate a bootable USB allowing for disk partition by fdisk -- in fact rufus itself can produce a bootable USB nevertheless with no utilities. This is useful to install Linux on very old computers, e.g., reorganise hard drive and then install Fedora from a liveUSB generated from Fedora Media Writer, https://getfedora.org/en/workstation/download/ . There are multiple routes to install particular Linux software; one may prefer to install them as standable but it may also come handy use mini-environments such as Anaconda, Miniconda, Linuxbrew or those already in system (e.g. Ubuntu) archive. A rich source of tips are in the-art-of-command-line and awesome-shell . 
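The ISO produced above can also be written to a USB stick directly from Linux rather than with rufus; the following is only a minimal sketch, and /dev/sdX is a placeholder for the USB device, so check it first with lsblk because dd overwrites the target without asking.
lsblk
sudo dd if=pd.iso of=/dev/sdX bs=4M status=progress conv=fsync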
The following command gives bit information (32 or 64) getconf LONG_BIT The LSB (Linux Standard Base) and distribution information is given with lsb_release -a Under Ubuntu, this could be made available with sudo apt-get install lsb-core . Under Fedora, you may be prompted to install package redhat-lsb-core . Related commands are uname -a and lscpu . The CPU speed can be seen with watch -n.1 \"cat /proc/cpuinfo | grep \\\"^[c]pu MHz\\\"\" The screen utility is operated as follows, screen -S screen -ls screen -r An introduction to Linux Access Control Lists (ACLs), https://www.redhat.com/sysadmin/linux-access-control-lists . To enable color with nano, try find /usr/share/nano/ -iname \"*.nanorc\" -exec echo include {} \\; >> ~/.nanorc Fedora This has already been covered above, i.e., https://getfedora.org/workstation/download/ . Ubuntu Releases are available from http://releases.ubuntu.com and packages are listed at https://packages.ubuntu.com/ . Often it is helpful to run sudo apt update sudo apt upgrade to be in line with the current release; one can check for updates with sudo apt list --upgradable . To resize a virtual disk, follow the steps here, https://blog.surges.eu/virtualbox-how-to-solve-the-issue-with-low-disk-space/ . The nautilus desktop can be reset with sudo apt install gnome-tweak-tool gnome-tweaks nautilus-desktop The Unity desktop can be installed with sudo apt-get install unity-session sudo dpkg-reconfigure lightdm Non-root installation may be necessary, e.g., apt download gir1.2-webkit-3.0 dpkg -i gir1.2-webkit-3.0_2.4.11-3ubuntu3_amd64.deb --force-not-root --root=$HOME Alternatively, we use the source package, typically apt-get source package cd package ./configure --prefix=$HOME make make install To unpack a package, one can do dpkg -x package.deb dir When a package URL is available, we can use a combination of wget, ar x, xz -d and tar xvf to do so. Its archive, http://archive.ubuntu.com/ubuntu/pool/universe , includes beagle, eigensoft, plink, plink-1.9, among others, which can be installed canonically with sudo apt install . sudo apt-get install libcanberra-gtk3-module It is useful to use sudo apt install tasksel , then one can use sudo tasksel . One would get an error message such as \"You must put some \u2018source\u2019 URIs in your sources.list\" which can be addressed as follows sudo apt-get update sudo nano /etc/apt/sources.list # uncomment deb-src here apt-get source hello The system hibernation can be done with sudo systemctl hibernate . Some operations on gnome extensions are given below, sudo apt install gnome-shell-extensions sudo apt search gnome-shell-extension gnome-shell --help sudo apt install gnome-shell-extension-prefs # https://extensions.gnome.org/extension/307/dash-to-dock/ sudo gnome-extensions install dash-to-dock@micxgx.gmail.com.v71.shell-extension.zip sudo apt install gnome-tweaks Installation of dash-to-dock from GitHub proceeds as follows, git clone https://github.com/micheleg/dash-to-dock sudo apt install gettext make -C dash-to-dock install Note that gnome-tweak-tool used earlier on has been updated to gnome-tweaks , which removed gnome shell extensions support from version 40; this remains possible with gnome-shell-extension-prefs . Then the Extensions app enables 'dash-to-dock' as with user themes and Glassy/Glassy-dark through gnome-tweaks by unpacking the relevant files to /usr/share/themes .
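As a sketch of that last step, a downloaded theme archive can be unpacked per user into ~/.themes or system-wide into /usr/share/themes and then selected from gnome-tweaks under Appearance; the archive name Glassy-dark.tar.xz below is only an assumed example.
mkdir -p ~/.themes
tar xf Glassy-dark.tar.xz -C ~/.themes
# or, for all users
sudo tar xf Glassy-dark.tar.xz -C /usr/share/themes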
For WhiteSur, various operations are given below, # https://www.gnome-look.org/p/1403328/ # https://github.com/vinceliuice/WhiteSur-gtk-theme mkdir ~/.themes mkdir ~/.icons tar fvzx WhiteSur-gtk-theme-2022-02-21.tar.gz cd WhiteSur-gtk-theme-2022-02-21/ ./install.sh -s 220 sudo apt install dbus-x11 ./tweaks.sh -d tar xfz WhiteSur-icon-theme-2022-03-18.tar.gz cd WhiteSur-icon-theme-2022-03-18/ ./install.sh cd ~/.icons # https://github.com/vinceliuice/WhiteSur-icon-theme # https://www.gnome-look.org/p/1405756/ tar xf 01-WhiteSur.tar.xz sudo apt install dconf-editor dconf-editor & gsettings set org.gnome.shell.extensions.dash-to-dock extend-height false gsettings set org.gnome.shell.extensions.dash-to-dock dock-position BOTTOM gsettings set org.gnome.shell.extensions.dash-to-dock transparency-mode FIXED gsettings set org.gnome.shell.extensions.dash-to-dock dash-max-icon-size 64 gsettings set org.gnome.shell.extensions.dash-to-dock unity-backlit-items true from dconf-editor, follow the navigation \"org > gnome > shell > extensions > dash-to-dock\" to customise, see here . Finally, perhaps more appealing is the gnome extension dash-to-panel . Here is information on file sharing, https://www.c-sharpcorner.com/article/how-to-share-files-between-ubuntu-and-windows-10/ Oracle VirtualBox 7.1.4 This requires the Microsoft Visual C++ 2019 redistributable, which can be downloaded, e.g., from https://aka.ms/vs/17/release/vc_redist.x64.exe . Windows 7 To use VirtualBox under Windows 7, one needs to enable virtualisation within the security section of the BIOS setup. To find out system info, one can run systeminfo A useful tip is from https://blog.csdn.net/xz360717118/article/details/67638548 Failed to instantiate CLSID_VirtualBox w/ IVirtualBox, but CLSID_VirtualBox w/ IUnknown works. The fix described there (translated; the author was on 64-bit Windows 7) is: 1. Press Win+R to open \"Run\", type regedit and open the Registry Editor. 2. Find HKEY_CLASSES_ROOT\\CLSID\\{00020420-0000-0000-C000-000000000046} InprocServer32 and change the first (Default) value to C:\\Windows\\system32\\oleaut32.dll 3. Find HKEY_CLASSES_ROOT\\CLSID\\{00020424-0000-0000-C000-000000000046} InprocServer32 and change the first (Default) value to C:\\Windows\\system32\\oleaut32.dll 4. Done (the author simply restarted the computer afterwards and it worked). Actually, there is no need to reboot Windows at Step 4. It is possible that installation of Ubuntu could freeze, in which case a proposal was to proceed with disabling 3D Acceleration and increasing the number of CPUs to 2-4, see https://www.maketecheasier.com/fix-ubuntu-freezing-virtualbox/ . However, our experiment showed that one can enable 3D Acceleration and two CPUs for installation, but it is necessary to disable 3D Acceleration and reset the CPU count to one for a single-CPU system after installation. The system information can be obtained with the systeminfo command as described above.
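These CPU and 3D Acceleration settings can also be changed from the command line with VBoxManage instead of the GUI; a minimal sketch, assuming a powered-off virtual machine named ubuntu18.04 (the name is an assumption).
VBoxManage modifyvm ubuntu18.04 --cpus 2 --accelerate3d on
# after installation, on a single-CPU host
VBoxManage modifyvm ubuntu18.04 --cpus 1 --accelerate3d off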
Fedora & shared folders The guest additions under Fedora 28 is furnished with sudo dnf update sudo dnf install gcc kernel-devel kernel-headers dkms make bzip2 perl cd /run/media/jhz22/VBox_GAs_5.2.12/ sudo ./VBoxLinuxAdditions.run To set up shared folders and enforce shared clipboard for bidirectional copy between Linux and Windows, # shared folders sudo mount -t vboxsf -o uid=jhz22 C /home/jhz22/C sudo mount -t vboxsf -o uid=jhz22 D /home/jhz22/D # shared clipboard killall VBoxClient sudo VBoxClient-all Another attempt is through VBoxMange, e.g., VBoxManage.exe sharedfolder add \"22.04\" --name U --hostpath \"U:\\\" . For Fedora 31, see https://www.if-not-true-then-false.com/2010/install-virtualbox-guest-additions-on-fedora-centos-red-hat-rhel/ . See https://www.nakivo.com/blog/make-virtualbox-full-screen/ on full-screen size, in particular, \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" setextradata \"32\" VBoxInternal2/EfiGraphicsResolution 1920x1080 for virtual machine 32. Compression Here are the steps, quoting http://www.netreliant.com/news/8/17/Compacting-VirtualBox-Disk-Images-Linux-Guests.html , for compressing large .vdi: # Linux dd if=/dev/zero of=zerofillfile bs=1M rem Windows path D:\\Program Files\\Oracle\\VirtualBox VBoxManage modifyhd --compact \"ubuntu18.04.vdi\" OVA file This is useful, e.g., Windows 11 development environment . Import the OVA file into Oracle VirtualBox: Open Oracle VirtualBox. Select FileImport Appliance. Click Folder to browse to the directory where the OVA file was downloaded. Select the Okta Access Gateway OVA file, and then click Open. Click Next. See https://help.okta.com/oag/en-us/content/topics/access-gateway/deploy-ovb.htm . Windows 11 Official site From the official ISO location, https://aka.ms/DownloadWindows11 , select \"Download Windows 11 Disk Image (ISO) for x64 devices.\". Verify your download after download, https://www.microsoft.com/en-us/software-download/windows11 , from Windows PowerShell, Get-FileHash D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso Algorithm Hash Path --------- ---- ---- SHA256 705AC061688FFD7F5721DA844D01DF85433856EAFAA8441ECE94B270685CA2DB D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso and also Get-FileHash D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso Algorithm Hash Path --------- ---- ---- SHA256 D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso as listed here, https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/?view=powershell-7.4 . 
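Under Linux or WSL the same verification can be done with sha256sum; a minimal sketch, assuming the ISO sits in the current directory, which compares the computed digest against the published value case-insensitively.
sha256sum Win11_24H2_EnglishInternational_x64.iso | grep -i d5a4c97c3e835c43b1b9a31933327c001766ce314608ba912f2fffc876044309 && echo hash OK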
Hash values for the ISO files for Each Language Country Locale Hash Code Arabic 64-bit 97ED2DF27DEBE5A8E94FB37BE4383EB8516D5C24117793BDA5C1E13568D3F754 Bulgarian 64-bit 1C1BD6E96290521B7E4F814ACA30C2CC4C7FAB1E3076439418673B90954A1FFC Chinese Simplified 64-bit BD1ECBA89BC59B7B62EF12C88C4E70D456EDAC10B969ECB3299EBD458B1F0FB3 Chinese Traditional 64-bit B92C3D6D428D12CF78A8D287B8FB28FFBC44D4A36B74C1B121C3CF443DC67ED2 Croatian 64-bit 95C7008AB1B0BA65212A36FB728AC93B9D7611400A78393FE7B319ED5B74AC5C Czech 64-bit 76120E535DB895D0EA69F47D55B836940821352A010DEDFCBCFC1E22E619FC4B Danish 64-bit D5D34DFFE45BA557581ADA14AC025DB8AA58989D054706061B7D0E5B47F5FDB9 Dutch 64-bit EE9BC545673D8F954A1EDAC691D749438D3E4DFA10A75D2519F79E3708D79FDC English 64-bit B56B911BF18A2CEAEB3904D87E7C770BDF92D3099599D61AC2497B91BF190B11 English International 64-bit D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 Estonian 64-bit D12DC03FA15A7F436A800692E9BA30DBDDAA4CD6122DD71719A2898E953B5407 Finnish 64-bit D6057E058021A9DF8A02B7BB16331D88C38E8BB63D5AD897D094E0DF6C6ADB5A French 64-bit B73AA55DB50D2AD348F61C6537DA05C0D6DED78A143763454E977BE85B444119 French Canadian 64-bit 3CA47351DAD16BD3F7AFA27CDEB321DD726B79859DE8D2769359C7621DE38EC4 German 64-bit 96E036F1219F9EE59F96312CE43EC7DF093E768383A77132750271940926A013 Greek 64-bit 92248F9F5A8735337D4B0DCCE4DD13348F8718858590FD9D1EB00020B5AFB33A Hebrew 64-bit 27D8090B9266A2DAC04E403FE63E46ADE661A5661BF26CA5EBB1A2F13245E86F Hungarian 64-bit 7B58807592AEB2FC5DDF5AF9749FA023CE9165AA4A1BF4F4741709F8AA2EE9D3 Italian 64-bit D95EC65EC06B4036835C7571FE0108159848D2883EF5DA3A67E480130B1F5862 Japanese 64-bit A1E1BCB6C014F39E4A324EC24DB1B745EE62617D29A450BF7B2596A3DCCECD7D Korean 64-bit 63ED86ADFC53F464649478F931EAE39A42DB3FD86C266C9B5AF7F8E19D318C51 Latvian 64-bit F4C2BF7C16576E6D631070D7B7CF6F55E8359D0729B571C570DC6F39D77D9EA6 Lithuanian 64-bit 23B14643B0AD6FDD0231EA201C5E1B000912EE3A0542F1B1F6907DB470AE7D7B Norwegian 64-bit 6CBD6C3FED9CE08AF85420F19D01C287FC58EC0C42DC7409D1D5C341CEB6492B Polish 64-bit 654273603A945EBA3B185FD5D2C22207A0EB788B5E3402F71E6D0839B3026943 Brazilian Portuguese 64-bit 1BC63E9C62FE3EB7E46778F24C790933770FA7430304583BBDF96B47A5D61F1F Portuguese 64-bit FAAFEDFC301A381B0712FC8DB9F0A16ADE2716B998DD4855D0A38172A9A87AB4 Romanian 64-bit 8B23AD43DF35EB75FADDFDFC85D616A001A4D72C757E5286011E3DC9452A5862 Russian 64-bit D0FBDB93864BF6C8ADE844473C9600EBB031C8BB656A272C736E45DFBD9B3BC9 Serbian Latin 64-bit 566047460EEA2F0E0D36E7A378DCFEEA79D7D3C0328227646BE4AB9AC39A9E36 Slovak 64-bit 5B77F2B5F7C77ABF68E628AC37A8841BB1058B7173C1C76DC5A5F6C5BBA855FD Slovenian 64-bit 73F0DC7CC15885F565C76D78D54E4E4D9934720FFE583B52EFDDA2E2457402D7 Spanish 64-bit 708AF7C9AC63B7EB045CA9B196568758B6C1749E8D13CADE61FAACBC7C66D142 Spanish (Mexico) 64-bit BA7A9B9A2052DEF8C24BCB88C76A47B2E6A6C6EE547EED226B9702C5C63AFC69 Swedish 64-bit F3674D377253E2D12635FDEAB76193E80BD80C56A41D10AB9CCEFDB0CDF1AD82 Thai 64-bit CAD5590347376103E369D7E04941B94C037F4CC6C75D81DC50E194FFA87C8CF8 Turkish 64-bit A47E054FE0B762F5B48D08E2B6F768F4B1CA0BC6DB831A76E829F92E578483AF Ukrainian 64-bit 68BABB954E4BFDF8A03ABC188D5120CF12D4DCA3CAE68EB1BFD4B64F872826E2 Some changes as from October 2024, Microsoft Windows [Version 10.0.22631.4249] (c) Microsoft Corporation. All rights reserved. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\CompatMarkers\" /f 2>NUL The operation completed successfully. 
C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\Shared\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\TargetVersionUpgradeExperienceIndicators\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe add \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\HwReqChk\" /f /v HwReqChkVars /t REG_MULTI_SZ /s , /d \"SQ_SecureBootCapable=TRUE,SQ_SecureBootEnabled=TRUE,SQ_TpmVersion=2,SQ_RamMB=8192,\" The operation completed successfully. C:\\Windows\\System32> C:\\Windows\\System32>reg.exe add \"HKLM\\SYSTEM\\Setup\\MoSetup\" /f /v AllowUpgradesWithUnsupportedTPMOrCPU /t REG_DWORD /d 1 The operation completed successfully. C:\\Windows\\System32> See also ZDNET, https://www.zdnet.com/article/how-to-upgrade-your-incompatible-windows-10-pc-to-windows-11/ & Rufus, https://rufus.ie/en/ . It is reported that the changes above have been implemented in Rufus 4.6. Build on the fly git clone https://github.com/AveYo/MediaCreationTool.bat cd MediaCreationTool.bat MediaCreationTool.bat and also https://massgrave.dev/windows_11_links , https://github.com/Raphire/Win11Debloat . Further information CloneVDI is described in this thread, https://forums.virtualbox.org/viewtopic.php?f=6&t=22422 . Moreover, https://www.maketecheasier.com/shrink-your-virtualbox-vm/ and http://bytefreaks.net/windows/reclaim-empty-space-from-virtualbox-vdi-disk-images-by-shrinking . vdi.md as in GWAS-2017 and now listed in GDCT Since one may allocate only part of RAM to VirtualBox, it is often necessary to run program under MS-DOS, e.g., sections on DEPICT. Additional note: 6.1.4 has problem with its Guest Additions. To enable copy/paste through clipboard one can use VBoxGuestAdditions_6.1.97-136310.iso as discused here, https://www.virtualbox.org/ticket/19336. When VirtualBox failed to start a session, one can enter MS-DOS prompt as adminstrator, and issue command bcdedit /set hypervisorlaunchtype off and restart the computer. When there is error message VirtualBox Failed to open session for Virtual Machine , then right click the machine and Discard Saved state . Otherwise, uninstall VirtualBox and reinstall. VirtualBox 7.x.x is considerably easier to set up. SystemRescue See https://www.system-rescue.org/ Windows Hyper-V Web page: https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/quick-start/enable-hyper-v Enable Hyper-V to create virtual machines on Windows 10. Hyper-V can be enabled in many ways including using the Windows 10 control panel, PowerShell or using the Deployment Imaging Servicing and Management tool (DISM). This documents walks through each option: Control Panel --> Programs --> Programs and Features --> Turn Windows Features on or off : Hyper-V , Virtual Machine Platforms . WSL Official page: https://github.com/microsoft/WSL To check version of Windows, issue winver (MS-DOS Prompt: Windows + r, cmd, winver). Installation and setup A description on PowerShell is here, https://learn.microsoft.com/en-us/windows/wsl/install . 
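The page linked above also describes the newer single-command route; a minimal sketch from an elevated PowerShell or Command Prompt (the distribution name is an assumption, and wsl --list --online shows the valid choices), with the manual DISM route below still useful on older builds.
wsl --install -d Ubuntu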
To initiate from PowerShell, use dism /online /enable-feature /feature-name:Microsoft-Windows-Subsystem-Linux /all /norestart dism /online /enable-feature /featurename:VirtualMachinePlatform /all /norestart After installation, it can be invoked from a MS-DOS Prompt with wsl -help wsl --list wsl --list --online wsl --list --verbose wsl --distribution Ubuntu wsl --set-default Ubuntu-20.04 The command could also takes additional parameters, e.g., -d debian. One can also create a desktop entry pointing to C:\\Windows\\system32\\wsl.exe. Location of the distros are found by cd %LocalAppData%\\Packages\\ cd CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc For old Windoes build, there is complaint about writing BIOS then an update called wsl_update_x64.msi is required. A comparison of WSL 1 and WSL 2 can be seen from https://aka.ms/wsl2 ( https://learn.microsoft.com/en-us/windows/wsl/compare-versions ). To migrate from WSL 1 to WSL 2, several options are possible, # A distribution only wsl --set-version Ubuntu 2 wsl --set-version Ubuntu-20.04 2 # WSL as a whole wsl --set-default-version 2 and return with error messages, Conversion in progress, this may take a few minutes... For information on key differences with WSL 2 please visit https://aka.ms/wsl2 The requested operation could not be completed due to a virtual disk system limitation. Virtual hard disk files must be uncompressed and unencrypted and must not be sparse. To fix this, as in https://logfetch.com/wsl2-uncompressed/ , open up File Explorer and navigate to: C:\\Users\\YOUR_USER\\AppData\\Local\\Packages\\CanonicalGroupLimited... Right click on LocalState , then Properties , then Advanced . Ensure Compress contents to save disk space and Encrypt contents to secure data are both deselected. Click OK , then Apply , then Apply changes to this folder only Software mobaXterm, https://mobaxterm.mobatek.net/ offers Advanced WSL settings , Graphical environments such as Gnome-desktop/LDXE-desktop/XFCE4-desktop so as to create sessions using graphical desktops. The corresponding installations are ubuntu-gnome-desktop (gnome-session-bin), lubuntu-desktop , xubuntu-desktop (in fact xfce4-session, xubuntu-core, xubuntu-default-settings), respectively. After started, create a session for WSL, open it and install software. sudo apt update sudo apt install nautilus sudo apt install firefox etc. By default C:\\ is /mnt/c. To establish other drives, one can do sudo mkdir /mnt/d sudo mount -t drvfs D: /mnt/d sudo umount /mnt/d The network drive can also be mapped, e.g., sudo mkdir /mnt/u sudo mount -t drvfs '\\\\me-filer1.medschl.cam.ac.uk\\home$\\jhz22' /mnt/u sudo mount -t drvfs U: /mnt/u See https://www.cyberciti.biz/faq/ubuntu-linux-install-gnome-desktop-on-server/ . See also https://www.makeuseof.com/tag/linux-desktop-windows-subsystem/ . ubuntu-wsl is a set of WSL utilities, which could be installed with sudo apt install ubuntu-wsl for executables at /usr/bin , e.g., wslvar PATH for Windows' \\%PATH environment variable and wslsys for basic information, wslusc to create a short cut on Windows desktop. GUI apps This is available from https://learn.microsoft.com/en-us/windows/wsl/tutorials/gui-apps . Example applications include gedit, gimp, nautilus, vlc, x11-apps, microsoft-edge ( https://www.microsoftedgeinsider.com/en-us/download?platform=linux-deb ). In case of problems, try Intel\u00ae Driver & Support Assistant (Intel\u00ae DSA), https://www.intel.co.uk/content/www/uk/en/support/detect.html . 
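A quick check from inside the distribution is whether the WSLg display variables are set at all; a minimal sketch, where empty output suggests that GUI support is not active and updating WSL with wsl --update may help.
echo $DISPLAY $WAYLAND_DISPLAY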
These files are described at https://ubuntu.com/tutorials/install-ubuntu-on-wsl2-on-windows-11-with-gui-support#1-overview showing octave: We start with octave --gui and open the scripts julia.m and juliatest.m , select run and then save file and run for the figure julia.png . The flowblade GUI is available with sudo apt-get install flowblade . After installation, one can start with export PYTHONPATH=/usr/share/flowblade/Flowblade:/usr/share/flowblade/Flowblade/vieweditor/:/usr/share/flowblade/Flowblade/tools export SHORTCUTS_PATH=/usr/share/flowblade/Flowblade/res /usr/bin/flowblade or cd /bin;./flowblade , https://github.com/jliljebl/flowblade/issues/857 . xfce4 & rdp xfce4 can also be made available with sudo apt update sudo apt install xorg sudo apt install xfce4 echo xfce4-session > ~/.xsession xfce4-session & or work with xrdp: sudo apt install xrdp sudo cp /etc/xrdp/xrdp.ini /etc/xrdp/xrdp.ini.bak sudo sed -i 's/port=3389/port=3390/' /etc/xrdp/xrdp.ini sudo /etc/init.d/xrdp restart One can add the line sudo service xrdp start to ~/.bashrc . Moreover, from a start-up directory such as %UserProfile%, create a file named .wslconfig with lines such as processors=4 memory=4GB swap=4GB localhostForwarding=true The remote desktop can be started from a DOS Prompt with mstsc for localhost:3390 (127.0.0.1:3390). Programs such as FireFox can be started. To avoid running the service from every terminal session, amend the xrdp service as follows, export plus=$(service --status-all 2>&1 | grep xrdp | cut -d' ' -f3) if [[ \"$plus\" != \"+\" ]]; then echo $plus sudo service xrdp start fi Earlier note: we check the IPv4 address from Windows with systeminfo (or ipconfig ) and start rdp to that address on port 3390. To avoid a dark screen, select Applications --> Settings --> Light Locker Settings --> Automatically lock the screen --> Never , Apply . One may also execute wsl --shutdown to reinitialize. Compression of disk wsl --shutdown diskpart select vdisk file=\"D:\\wsl\\Ubuntu-2204\\ext4.vhdx\" compact vdisk echo \"%LocalAppData%\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\\\LocalState\\ext4.vhdx\" select vdisk file=\"C:\\Users\\User\\AppData\\Local\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\LocalState\\ext4.vhdx\" compact vdisk Downloading specific distributions The list is available from https://learn.microsoft.com/en-us/windows/wsl/install-manual but alternatives are also possible. # Ubuntu 21.10 wget https://cloud-images.ubuntu.com/releases/impish/release/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz mkdir d:\\wsl wsl --import Ubuntu-2110 d:/WSL/Ubuntu-2110 d:/Downloads/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz wsl -d Ubuntu-2110 adduser jhz22 usermod -aG sudo jhz22 su jhz22 ln -s /mnt/c C ln -s /mnt/d D ln -s /mnt/f F # restart wsl -d Ubuntu-2110 -u jhz22 # Ubuntu 22.04 # https://cloud-images.ubuntu.com/releases/22.04/release-20220923/ubuntu-22.04-server-cloudimg-arm64-wsl.rootfs.tar.gz Windows applications To start firefox or Chrome, here is a simple way, cmd.exe /c start https://github.com echo cmd.exe /c start https://github.com > ${HOME}/bin/edge chmod +x ${HOME}/bin/edge edge It is possible with default applications under Windows, e.g., cmd.exe /c u:/work/eQTL-MR.pptx which opens up PowerPoint directly.
One can actually generalize these, e.g., ln -s $HOME/C/Program\\ Files\\ \\(x86\\)/Adobe/Acrobat\\ Reader\\ DC/Reader/AcroRd32.exe /home/$USER/bin/AcroRd32.exe ln -s $HOME/bin/AcroRd32.exe /home/$USER/bin/xpdf followed by a call to AcroRd32.exe and as xpdf , or directly call a list of programs: calc.exe , comp.exe , control.exe , curl.exe , fc.exe , find.exe , finger.exe , mspaint.exe , net.exe , sort.exe , tar.exe , whoami.exe , write.exe , xcopy.exe . Uninstallation This is achieved by # via wsl wsl --unregister Ubuntu-20.04 # via wslconfig wslconfig /u Ubuntu-20.04 Anaconda Once installed, it is customary to make several channels accessible, conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda Package in conda-forge include boost, django, glpk, gnuplot, go, gperf, hdf5, ipython, jquery, julia, jupyter, keras, limix, mercurial, miktex, mysql, nano, numpy, pandas, sage, scikit-learn, zlib. Packages in bioconda includes amos, bcftools, beagle, bedops, bedtools, blast, bowtie, bowtie2, bwa, chromhmm, circos, deeptools, emmix, ensembl-vep, fastlmm, fastqc, gatk, gatk4, hclust2, himmer, himmer2, hisat2, igv, impute2, lofreq, mapsplice, mrbayes, ms, nanostat, paml, pbgzip, phylip, picard, plink, plink2, r-wgcna, rsem, rtg-tools, sambamba, samtools, seqkt, sequana, snpeff, snpsift, sra-tools, star, stringtie, tabix, tophat, ucsc-blat, ucsc-liftover, vcftools. For instance, to install intervaltree as required by depict, the following is sufficience, conda install intervaltree All the packages installed can be seen with conda list . To install java, run following command conda install -c anaconda openjdk Other installations include perl, R. Note that conda under Windows is in typically D:/ProgramData/Anaconda2/Library/bin. Altogether we really need to set path=%path%;D:/ProgramData/Anaconda2;D:/ProgramData/Anaconda2/Library/bin Miniconda is available from https://conda.io/miniconda.html . Installation from scratch, wget https://repo.anaconda.com/archive/Anaconda2-2019.03-Linux-ppc64le.sh sh Anaconda2-2019.03-Linux-ppc64le.sh # do not activate at startup conda config --set auto_activate_base false export PYTHONPATH=/scratch/jhz22/lib/python2.7/site-packages/ Examine .bashrc for changes. See https://docs.anaconda.com/anaconda/user-guide/getting-started/ to get started. GitHub See physalia for information. It has been suggested to display math using the following premium in a GitHub page, A current repository on GitHub is here, https://cambridge-ceu.github.io/GitHub-matters/ . mercurial This is associated with the familiar hg command as used for instance by qctool . It is the executable file for Mercurial source code management system, sudo apt install mercurial libraOffice sudo add-apt-repository ppa:libreoffice/ppa sudo apt-get update sudo apt-get install libreoffice Linuxbrew Follow http://linuxbrew.sh/ and possibly https://docs.brew.sh sudo apt-get install build-essential sh -c \"$(curl -fsSL https://raw.githubusercontent.com/Linuxbrew/install/master/install.sh)\" echo 'export PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"' >>~/.profile echo 'export MANPATH=\"/home/linuxbrew/.linuxbrew/share/man:$MANPATH\"' >>~/.profile echo 'export INFOPATH=\"/home/linuxbrew/.linuxbrew/share/info:$INFOPATH\"' >>~/.profile PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\" AWS http://aws.amazon.com/ The Open Guide to Amazon Web Services . 
E.g., https://sites.google.com/site/jpopgen/wgsa/create-an-aws-account https://sites.google.com/site/jpopgen/wgsa/launch-an-instance https://sites.google.com/site/jpopgen/wgsa/terminate-an-instance modules It is a system that allows you to easily change between different versions of compilers and other software. Here explains how to set up globally, # https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz xz -d modules_5.2.0.orig.tar.xz tar xvf modules_5.2.0.orig.tar cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list Modification can be seen, e.g., function module () { curl -sf -XPOST http://modules-mon.hpc.cam.ac.uk/action -H 'Content-Type: application/json' -d '{ \"username\":\"'$USER'\", \"hostname\":\"'$HOSTNAME'\", \"command\":\"'\"$*\"'\" }' 2>&1 > /dev/null; eval `/usr/bin/modulecmd bash $*` } module load matlab/r2014a matlab $@ Usually the eval line is sufficient. docker See https://www.docker.com/ and https://docs.docker.com/ # https://docs.docker.com/engine/install/ubuntu/ for pkg in docker.io docker-doc docker-compose docker-compose-v2 podman-docker containerd runc; do sudo apt-get remove $pkg; done # Add Docker's official GPG key: sudo apt-get update sudo apt-get install ca-certificates curl sudo install -m 0755 -d /etc/apt/keyrings sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc sudo chmod a+r /etc/apt/keyrings/docker.asc # Add the repository to Apt sources: echo \\ \"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \\ $(. /etc/os-release && echo \"$VERSION_CODENAME\") stable\" | \\ sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin For instance, git clone https://docs.docker.com/engine/install/ubuntu/ docker run --rm -it neoaggelos/knotify /knotify/bin/rna_analysis --sequence AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC giving CCCAAACCGUAACCCC Sequence: AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC Structure: ..............((((((.....[[[))))))....]]]...... Energy: -13.800000190734863 Duration: 0.297946 s By default, Docker is only accessible with root privileges (sudo). As a regular user, add your user to the docker group. 
sudo addgroup --system docker sudo adduser $USER docker newgrp docker sudo snap disable docker sudo snap enable docker For Fedora 33, we have sudo dnf -y install dnf-plugins-core sudo dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo sudo dnf install docker-ce docker-ce-cli containerd.io sudo systemctl start docker sudo docker pull eqtlcatalogue/susie-finemapping:v20.08.1 sudo docker run eqtlcatalogue/susie-finemapping:v20.08.1 # remove docker engine # sudo dnf remove docker-ce docker-ce-cli containerd.io # remove all images, containers, and volumes sudo rm -rf /var/lib/docker OpenVPN See https://github.com/OpenVPN/openvpn-gui . Usage example: sudo openvpn --config myconfig.ovpn Time Zone An example under Fedora, timedatectl list-timezones timedatectl set-timezone Europe/London which is useful to synchronise with files obtained from elsewhere. Web-Linux ssh terminal ShellinaBox sudo apt update sudo apt-get install shellinabox sudo systemctl enable shellinaboxd # restart/start/stop sudo service shellinabox restart sudo systemctl start shellinaboxd sudo systemctl restart shellinaboxd sudo systemctl stop shellinaboxd # alternatives which work sudo /etc/init.d/shellinabox start sudo /etc/init.d/shellinabox stop /etc/init.d/shellinabox status so that firefox https://127.0.0.1:4200 & allows for login from Firefox. cpolar Web: https://www.cpolar.com ( https://dashboard.cpolar.com/signup , https://dashboard.cpolar.com/login , https://dashboard.cpolar.com/get-started ) sudo apt install curl curl -L https://www.cpolar.com/static/downloads/install-release-cpolar.sh | sudo bash cpolar sudo systemctl enable cpolar sudo systemctl start cpolar firefox localhost:9200 & Featured articles Don't like your Linux desktop? Here's how to install an alternative, https://www.zdnet.com/article/dont-like-your-linux-desktop-heres-how-to-install-an-alternative/ .","title":"SYSTEMS"},{"location":"SYSTEMS/#systems","text":"This is a skeleton to list items to be detailed in the near future.","title":"Systems"},{"location":"SYSTEMS/#freedos-and-linux","text":"FreeDOS is available from http://www.freedos.org/ . FreeDOS is an open source DOS-compatible operating system that you can use to play classic DOS games, run legacy business software, or develop embedded systems. Any program that works on MS-DOS should also run on FreeDOS. It is notable that v1.3 provides liveCD and liteUSB which could be useful. Here is a description of how to convert VMDK format to ISO, https://www.ilovefreesoftware.com/26/featured/how-to-convert-vmdk-to-iso-in-windows.html , e.g., qemu-img convert -f vmdk FD13LITE.VMDK pd.raw dd if=pd.raw of=pd.iso We can then use rufus, https://rufus.ie/ , to generate a bootable USB allowing for disk partition by fdisk -- in fact rufus itself can produce a bootable USB nevertheless with no utilities. This is useful to install Linux on very old computers, e.g., reorganise the hard drive and then install Fedora from a liveUSB generated from Fedora Media Writer, https://getfedora.org/en/workstation/download/ . There are multiple routes to install particular Linux software; one may prefer to install them as standalone, but it may also come in handy to use mini-environments such as Anaconda, Miniconda, Linuxbrew or those already in the system (e.g. Ubuntu) archive. A rich source of tips is in the-art-of-command-line and awesome-shell .
The following command gives bit information (32 or 64) getconf LONG_BIT The LSB (Linux Standard Base) and distribution information is given with lsb_release -a Under Ubuntu, this could be made available with sudo apt-get install lsb-core . Under Fedora, you may be prompted to install package redhat-lsb-core . Related commands are uname -a and lscpu . The CPU speed can be seen with watch -n.1 \"cat /proc/cpuinfo | grep \\\"^[c]pu MHz\\\"\" The screen utility is operated as follows, screen -S screen -ls screen -r An introduction to Linux Access Control Lists (ACLs), https://www.redhat.com/sysadmin/linux-access-control-lists . To enable color with nano, try find /usr/share/nano/ -iname \"*.nanorc\" -exec echo include {} \\; >> ~/.nanorc","title":"FreeDOS and Linux"},{"location":"SYSTEMS/#fedora","text":"This has already been covered above, i.e., https://getfedora.org/workstation/download/ .","title":"Fedora"},{"location":"SYSTEMS/#ubuntu","text":"Releases are available from http://releases.ubuntu.com and packages are listed at https://packages.ubuntu.com/ . Often it is helpful to run sudo apt update sudo apt upgrade to be in line with the current release; one can check for updates with sudo apt list --upgradable . To resize a virtual disk, follow steps here, https://blog.surges.eu/virtualbox-how-to-solve-the-issue-with-low-disk-space/ . The nautilus desktop can be reset with sudo apt install gnome-tweak-tool gnome-tweaks nautilus-desktop The Unity desktop can be installed with sudo apt-get install unity-session sudo dpkg-reconfigure lightdm Non-root installation may be necessary, e.g., apt download gir1.2-webkit-3.0 apt -i gir1.2-webkit-3.0_2.4.11-3ubuntu3_amd64.deb --force-not-root --root=$HOME Alternatively, we use source package, typically apt-get source package cd package ./configure --prefix=$HOME make make install To unpack a package, one can do dpkg -x package.deb dir When a package URL is available, we can use wget, ar x, xz -d, tar xvf combination to do so. Its archive, http://archive.ubuntu.com/ubuntu/pool/universe , includes beagle, eigensoft, plink, plink-1.9, among others, which can be installed canonically with sudo apt install . sudo apt-get install libcanberra-gtk3-module It is useful to use sudo apt install tasksel , then one can use sudo tasksel . One would get error message such as \"You must put some \u2018source\u2019 URIs in your sources.list\" which can be done as follows sudo apt-get update sudo nano /etc/apt/sources.list # uncomment deb-src here apt-get source hello The system hibernation can be done with sudo systemctl hibernate . Some operations on gnome extensions are given below, sudo apt install gnome-shell-extensions sudo apt search gnome-shell-extension gnome-shell --help sudo apt install gnome-shell-extension-prefs # https://extensions.gnome.org/extension/307/dash-to-dock/ sudo gnome-extensions install dash-to-dockmicxgx.gmail.com.v71.shell-extension.zip sudo apt install gnome-tweaks Installation of dash-to-dock from GitHub proceeds as follows, git clone https://github.com/micheleg/dash-to-dock sudo apt install gettext make -C dash-to-dock install Note that gnome-tweak-tool used earlier on has been updated to gnome-tweaks , which removes gnome shell extensions support from version 40 and it is possible with gnome-shell-extension-prefs . Then the Extension apps enables 'dash-on-dock' as with user themes and Glassy/Glassy-dark through gnome-tweaks by unpacking the relevant files to /usr/share/themes . 
For WhiteSur, various operations are given below, # https://www.gnome-look.org/p/1403328/ # https://github.com/vinceliuice/WhiteSur-gtk-theme mkdir ~/.themes mkdir ~/.icons tar fvzx WhiteSur-gtk-theme-2022-02-21.tar.gz cd WhiteSur-gtk-theme-2022-02-21/ ./install.sh -s 220 sudo apt install dbus-x11 ./tweaks.sh -d tar xfz WhiteSur-icon-theme-2022-03-18.tar.gz cd WhiteSur-icon-theme-2022-03-18/ /install.sh cd ~/.icons # https://github.com/vinceliuice/WhiteSur-icon-theme # https://www.gnome-look.org/p/1405756/ tar xf 01-WhiteSur.tar.xz sudo apt install dconf-editor dconf-editor & gsettings set org.gnome.shell.extensions.dash-to-dock extend-height false gsettings set org.gnome.shell.extensions.dash-to-dock dock-position BOTTOM gsettings set org.gnome.shell.extensions.dash-to-dock transparency-mode FIXED gsettings set org.gnome.shell.extensions.dash-to-dock dash-max-icon-size 64 gsettings set org.gnome.shell.extensions.dash-to-dock unity-backlit-items true from dconf-editor, follow navigation \"org > gnome > shell > extensions > dash-to-dock\" to customise, see here . Finally, perhaps more appealing is the gnome extension dash-to-panel . Here is information on file sharing, https://www.c-sharpcorner.com/article/how-to-share-files-between-ubuntu-and-windows-10/","title":"Ubuntu"},{"location":"SYSTEMS/#oracle-virtualbox","text":"","title":"Oracle VirtualBox"},{"location":"SYSTEMS/#714","text":"This requires Visual Studio C++ 2.19, which can be downloaded, e.g. https://aka.ms/vs/17/release/vc_redist.x64.exe .","title":"7.1.4"},{"location":"SYSTEMS/#windows-7","text":"To use VirtualBox under Windows 7, one needs to enable virtualisation within security section of BIOS setup. To find out system info, one can run systeminfo A useful tip is from https://blog.csdn.net/xz360717118/article/details/67638548 Failed to instantiate CLSID_VirtualBox w/ IVirtualBox, but CLSID_VirtualBox w/ IUnknown works. 2017\u5e7403\u670828\u65e5 16:50:30 \u4e00\u53ea\u732a\u513f\u866b \u9605\u8bfb\u6570 17551 \u6807\u7b7e\uff1a virtualbox win7 \u66f4\u591a \u4e2a\u4eba\u5206\u7c7b\uff1a \u670d\u52a1\u5668 \u7248\u6743\u58f0\u660e\uff1a\u672c\u6587\u4e3a\u535a\u4e3b\u539f\u521b\u6587\u7ae0\uff0c\u672a\u7ecf\u535a\u4e3b\u5141\u8bb8\u4e0d\u5f97\u8f6c\u8f7d\u3002 https://blog.csdn.net/xz360717118/article/details/67638548 \u6211\u662f win7 64\u4f4d \u89e3\u51b3\u529e\u6cd5\uff1a 1\uff0c win+r \u5feb\u6377\u952e\u6253\u5f00 \u201c\u8fd0\u884c\u201d\uff0c\u8f93\u5165regedit \u6253\u5f00\u6ce8\u518c\u8868 2\uff0c\u627e\u5230 HKEY_CLASSES_ROOT\\CLSID\\{00020420-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 3\uff0c\u627e\u5230HKEY_CLASSES_ROOT\\CLSID\\{00020424-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 4\uff0c\u5b8c\u4e8b\u3002\uff08\u6211\u4fee\u6539\u4e86\u5b8c\u4e86 \u4e5f\u6ca1\u8bd5\u7528\uff0c\u76f4\u63a5\u91cd\u542f\u7535\u8111 \u7136\u540e\u6210\u529f\u4e86\uff09 Actually, there is no need to reboot Windows at Step 4. It is possible that installation of Ubuntu could be freezed, in which case a proposal was to proceed with disabling 3D Acceleration and increasing the number of CPUs to 2-4, see https://www.maketecheasier.com/fix-ubuntu-freezing-virtualbox/ . 
However, our experiment showed that one can enable 3D Acceleration and two CPUs for installation but it is necessary to disable 3D Acceleration and reset CPU to be one for a system with one CPU after installation. The system informatino can be obtained with systeminfo command as described above.","title":"Windows 7"},{"location":"SYSTEMS/#fedora-shared-folders","text":"The guest additions under Fedora 28 is furnished with sudo dnf update sudo dnf install gcc kernel-devel kernel-headers dkms make bzip2 perl cd /run/media/jhz22/VBox_GAs_5.2.12/ sudo ./VBoxLinuxAdditions.run To set up shared folders and enforce shared clipboard for bidirectional copy between Linux and Windows, # shared folders sudo mount -t vboxsf -o uid=jhz22 C /home/jhz22/C sudo mount -t vboxsf -o uid=jhz22 D /home/jhz22/D # shared clipboard killall VBoxClient sudo VBoxClient-all Another attempt is through VBoxMange, e.g., VBoxManage.exe sharedfolder add \"22.04\" --name U --hostpath \"U:\\\" . For Fedora 31, see https://www.if-not-true-then-false.com/2010/install-virtualbox-guest-additions-on-fedora-centos-red-hat-rhel/ . See https://www.nakivo.com/blog/make-virtualbox-full-screen/ on full-screen size, in particular, \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" setextradata \"32\" VBoxInternal2/EfiGraphicsResolution 1920x1080 for virtual machine 32.","title":"Fedora & shared folders"},{"location":"SYSTEMS/#compression","text":"Here are the steps, quoting http://www.netreliant.com/news/8/17/Compacting-VirtualBox-Disk-Images-Linux-Guests.html , for compressing large .vdi: # Linux dd if=/dev/zero of=zerofillfile bs=1M rem Windows path D:\\Program Files\\Oracle\\VirtualBox VBoxManage modifyhd --compact \"ubuntu18.04.vdi\"","title":"Compression"},{"location":"SYSTEMS/#ova-file","text":"This is useful, e.g., Windows 11 development environment . Import the OVA file into Oracle VirtualBox: Open Oracle VirtualBox. Select FileImport Appliance. Click Folder to browse to the directory where the OVA file was downloaded. Select the Okta Access Gateway OVA file, and then click Open. Click Next. See https://help.okta.com/oag/en-us/content/topics/access-gateway/deploy-ovb.htm .","title":"OVA file"},{"location":"SYSTEMS/#windows-11","text":"","title":"Windows 11"},{"location":"SYSTEMS/#official-site","text":"From the official ISO location, https://aka.ms/DownloadWindows11 , select \"Download Windows 11 Disk Image (ISO) for x64 devices.\". Verify your download after download, https://www.microsoft.com/en-us/software-download/windows11 , from Windows PowerShell, Get-FileHash D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso Algorithm Hash Path --------- ---- ---- SHA256 705AC061688FFD7F5721DA844D01DF85433856EAFAA8441ECE94B270685CA2DB D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso and also Get-FileHash D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso Algorithm Hash Path --------- ---- ---- SHA256 D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso as listed here, https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/?view=powershell-7.4 . 
Hash values for the ISO files for Each Language Country Locale Hash Code Arabic 64-bit 97ED2DF27DEBE5A8E94FB37BE4383EB8516D5C24117793BDA5C1E13568D3F754 Bulgarian 64-bit 1C1BD6E96290521B7E4F814ACA30C2CC4C7FAB1E3076439418673B90954A1FFC Chinese Simplified 64-bit BD1ECBA89BC59B7B62EF12C88C4E70D456EDAC10B969ECB3299EBD458B1F0FB3 Chinese Traditional 64-bit B92C3D6D428D12CF78A8D287B8FB28FFBC44D4A36B74C1B121C3CF443DC67ED2 Croatian 64-bit 95C7008AB1B0BA65212A36FB728AC93B9D7611400A78393FE7B319ED5B74AC5C Czech 64-bit 76120E535DB895D0EA69F47D55B836940821352A010DEDFCBCFC1E22E619FC4B Danish 64-bit D5D34DFFE45BA557581ADA14AC025DB8AA58989D054706061B7D0E5B47F5FDB9 Dutch 64-bit EE9BC545673D8F954A1EDAC691D749438D3E4DFA10A75D2519F79E3708D79FDC English 64-bit B56B911BF18A2CEAEB3904D87E7C770BDF92D3099599D61AC2497B91BF190B11 English International 64-bit D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 Estonian 64-bit D12DC03FA15A7F436A800692E9BA30DBDDAA4CD6122DD71719A2898E953B5407 Finnish 64-bit D6057E058021A9DF8A02B7BB16331D88C38E8BB63D5AD897D094E0DF6C6ADB5A French 64-bit B73AA55DB50D2AD348F61C6537DA05C0D6DED78A143763454E977BE85B444119 French Canadian 64-bit 3CA47351DAD16BD3F7AFA27CDEB321DD726B79859DE8D2769359C7621DE38EC4 German 64-bit 96E036F1219F9EE59F96312CE43EC7DF093E768383A77132750271940926A013 Greek 64-bit 92248F9F5A8735337D4B0DCCE4DD13348F8718858590FD9D1EB00020B5AFB33A Hebrew 64-bit 27D8090B9266A2DAC04E403FE63E46ADE661A5661BF26CA5EBB1A2F13245E86F Hungarian 64-bit 7B58807592AEB2FC5DDF5AF9749FA023CE9165AA4A1BF4F4741709F8AA2EE9D3 Italian 64-bit D95EC65EC06B4036835C7571FE0108159848D2883EF5DA3A67E480130B1F5862 Japanese 64-bit A1E1BCB6C014F39E4A324EC24DB1B745EE62617D29A450BF7B2596A3DCCECD7D Korean 64-bit 63ED86ADFC53F464649478F931EAE39A42DB3FD86C266C9B5AF7F8E19D318C51 Latvian 64-bit F4C2BF7C16576E6D631070D7B7CF6F55E8359D0729B571C570DC6F39D77D9EA6 Lithuanian 64-bit 23B14643B0AD6FDD0231EA201C5E1B000912EE3A0542F1B1F6907DB470AE7D7B Norwegian 64-bit 6CBD6C3FED9CE08AF85420F19D01C287FC58EC0C42DC7409D1D5C341CEB6492B Polish 64-bit 654273603A945EBA3B185FD5D2C22207A0EB788B5E3402F71E6D0839B3026943 Brazilian Portuguese 64-bit 1BC63E9C62FE3EB7E46778F24C790933770FA7430304583BBDF96B47A5D61F1F Portuguese 64-bit FAAFEDFC301A381B0712FC8DB9F0A16ADE2716B998DD4855D0A38172A9A87AB4 Romanian 64-bit 8B23AD43DF35EB75FADDFDFC85D616A001A4D72C757E5286011E3DC9452A5862 Russian 64-bit D0FBDB93864BF6C8ADE844473C9600EBB031C8BB656A272C736E45DFBD9B3BC9 Serbian Latin 64-bit 566047460EEA2F0E0D36E7A378DCFEEA79D7D3C0328227646BE4AB9AC39A9E36 Slovak 64-bit 5B77F2B5F7C77ABF68E628AC37A8841BB1058B7173C1C76DC5A5F6C5BBA855FD Slovenian 64-bit 73F0DC7CC15885F565C76D78D54E4E4D9934720FFE583B52EFDDA2E2457402D7 Spanish 64-bit 708AF7C9AC63B7EB045CA9B196568758B6C1749E8D13CADE61FAACBC7C66D142 Spanish (Mexico) 64-bit BA7A9B9A2052DEF8C24BCB88C76A47B2E6A6C6EE547EED226B9702C5C63AFC69 Swedish 64-bit F3674D377253E2D12635FDEAB76193E80BD80C56A41D10AB9CCEFDB0CDF1AD82 Thai 64-bit CAD5590347376103E369D7E04941B94C037F4CC6C75D81DC50E194FFA87C8CF8 Turkish 64-bit A47E054FE0B762F5B48D08E2B6F768F4B1CA0BC6DB831A76E829F92E578483AF Ukrainian 64-bit 68BABB954E4BFDF8A03ABC188D5120CF12D4DCA3CAE68EB1BFD4B64F872826E2 Some changes as from October 2024, Microsoft Windows [Version 10.0.22631.4249] (c) Microsoft Corporation. All rights reserved. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\CompatMarkers\" /f 2>NUL The operation completed successfully. 
C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\Shared\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\TargetVersionUpgradeExperienceIndicators\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe add \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\HwReqChk\" /f /v HwReqChkVars /t REG_MULTI_SZ /s , /d \"SQ_SecureBootCapable=TRUE,SQ_SecureBootEnabled=TRUE,SQ_TpmVersion=2,SQ_RamMB=8192,\" The operation completed successfully. C:\\Windows\\System32> C:\\Windows\\System32>reg.exe add \"HKLM\\SYSTEM\\Setup\\MoSetup\" /f /v AllowUpgradesWithUnsupportedTPMOrCPU /t REG_DWORD /d 1 The operation completed successfully. C:\\Windows\\System32> See also ZDNET, https://www.zdnet.com/article/how-to-upgrade-your-incompatible-windows-10-pc-to-windows-11/ & Rufus, https://rufus.ie/en/ . It is reported that the changes above have been implemented in Rufus 4.6.","title":"Official site"},{"location":"SYSTEMS/#build-on-the-fly","text":"git clone https://github.com/AveYo/MediaCreationTool.bat cd MediaCreationTool.bat MediaCreationTool.bat and also https://massgrave.dev/windows_11_links , https://github.com/Raphire/Win11Debloat .","title":"Build on the fly"},{"location":"SYSTEMS/#further-information","text":"CloneVDI is described in this thread, https://forums.virtualbox.org/viewtopic.php?f=6&t=22422 . Moreover, https://www.maketecheasier.com/shrink-your-virtualbox-vm/ and http://bytefreaks.net/windows/reclaim-empty-space-from-virtualbox-vdi-disk-images-by-shrinking . vdi.md as in GWAS-2017 and now listed in GDCT Since one may allocate only part of RAM to VirtualBox, it is often necessary to run program under MS-DOS, e.g., sections on DEPICT. Additional note: 6.1.4 has problem with its Guest Additions. To enable copy/paste through clipboard one can use VBoxGuestAdditions_6.1.97-136310.iso as discused here, https://www.virtualbox.org/ticket/19336. When VirtualBox failed to start a session, one can enter MS-DOS prompt as adminstrator, and issue command bcdedit /set hypervisorlaunchtype off and restart the computer. When there is error message VirtualBox Failed to open session for Virtual Machine , then right click the machine and Discard Saved state . Otherwise, uninstall VirtualBox and reinstall. VirtualBox 7.x.x is considerably easier to set up.","title":"Further information"},{"location":"SYSTEMS/#systemrescue","text":"See https://www.system-rescue.org/","title":"SystemRescue"},{"location":"SYSTEMS/#windows-hyper-v","text":"Web page: https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/quick-start/enable-hyper-v Enable Hyper-V to create virtual machines on Windows 10. Hyper-V can be enabled in many ways including using the Windows 10 control panel, PowerShell or using the Deployment Imaging Servicing and Management tool (DISM). This documents walks through each option: Control Panel --> Programs --> Programs and Features --> Turn Windows Features on or off : Hyper-V , Virtual Machine Platforms .","title":"Windows Hyper-V"},{"location":"SYSTEMS/#wsl","text":"Official page: https://github.com/microsoft/WSL To check version of Windows, issue winver (MS-DOS Prompt: Windows + r, cmd, winver).","title":"WSL"},{"location":"SYSTEMS/#installation-and-setup","text":"A description on PowerShell is here, https://learn.microsoft.com/en-us/windows/wsl/install . 
To initiate from PowerShell, use dism /online /enable-feature /feature-name:Microsoft-Windows-Subsystem-Linux /all /norestart dism /online /enable-feature /featurename:VirtualMachinePlatform /all /norestart After installation, it can be invoked from a MS-DOS Prompt with wsl -help wsl --list wsl --list --online wsl --list --verbose wsl --distribution Ubuntu wsl --set-default Ubuntu-20.04 The command could also takes additional parameters, e.g., -d debian. One can also create a desktop entry pointing to C:\\Windows\\system32\\wsl.exe. Location of the distros are found by cd %LocalAppData%\\Packages\\ cd CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc For old Windoes build, there is complaint about writing BIOS then an update called wsl_update_x64.msi is required. A comparison of WSL 1 and WSL 2 can be seen from https://aka.ms/wsl2 ( https://learn.microsoft.com/en-us/windows/wsl/compare-versions ). To migrate from WSL 1 to WSL 2, several options are possible, # A distribution only wsl --set-version Ubuntu 2 wsl --set-version Ubuntu-20.04 2 # WSL as a whole wsl --set-default-version 2 and return with error messages, Conversion in progress, this may take a few minutes... For information on key differences with WSL 2 please visit https://aka.ms/wsl2 The requested operation could not be completed due to a virtual disk system limitation. Virtual hard disk files must be uncompressed and unencrypted and must not be sparse. To fix this, as in https://logfetch.com/wsl2-uncompressed/ , open up File Explorer and navigate to: C:\\Users\\YOUR_USER\\AppData\\Local\\Packages\\CanonicalGroupLimited... Right click on LocalState , then Properties , then Advanced . Ensure Compress contents to save disk space and Encrypt contents to secure data are both deselected. Click OK , then Apply , then Apply changes to this folder only Software mobaXterm, https://mobaxterm.mobatek.net/ offers Advanced WSL settings , Graphical environments such as Gnome-desktop/LDXE-desktop/XFCE4-desktop so as to create sessions using graphical desktops. The corresponding installations are ubuntu-gnome-desktop (gnome-session-bin), lubuntu-desktop , xubuntu-desktop (in fact xfce4-session, xubuntu-core, xubuntu-default-settings), respectively. After started, create a session for WSL, open it and install software. sudo apt update sudo apt install nautilus sudo apt install firefox etc. By default C:\\ is /mnt/c. To establish other drives, one can do sudo mkdir /mnt/d sudo mount -t drvfs D: /mnt/d sudo umount /mnt/d The network drive can also be mapped, e.g., sudo mkdir /mnt/u sudo mount -t drvfs '\\\\me-filer1.medschl.cam.ac.uk\\home$\\jhz22' /mnt/u sudo mount -t drvfs U: /mnt/u See https://www.cyberciti.biz/faq/ubuntu-linux-install-gnome-desktop-on-server/ . See also https://www.makeuseof.com/tag/linux-desktop-windows-subsystem/ . ubuntu-wsl is a set of WSL utilities, which could be installed with sudo apt install ubuntu-wsl for executables at /usr/bin , e.g., wslvar PATH for Windows' \\%PATH environment variable and wslsys for basic information, wslusc to create a short cut on Windows desktop.","title":"Installation and setup"},{"location":"SYSTEMS/#gui-apps","text":"This is available from https://learn.microsoft.com/en-us/windows/wsl/tutorials/gui-apps . Example applications include gedit, gimp, nautilus, vlc, x11-apps, microsoft-edge ( https://www.microsoftedgeinsider.com/en-us/download?platform=linux-deb ). 
In case of problems, try Intel\u00ae Driver & Support Assistant (Intel\u00ae DSA), https://www.intel.co.uk/content/www/uk/en/support/detect.html . These files are described https://ubuntu.com/tutorials/install-ubuntu-on-wsl2-on-windows-11-with-gui-support#1-overview showing octave: We start with octave --gui and open scripts julia.m and juliatest.m , select run and then save file and run for figure julia.png , . The flowblade GUI is available with sudo apt-get install flowblade . After installation, one can start with export PYTHONPATH=/usr/share/flowblade/Flowblade:/usr/share/flowblade/Flowblade/vieweditor/:/usr/share/flowblade/Flowblade/tools export SHORTCUTS_PATH=/usr/share/flowblade/Flowblade/res /usr/bin/flowblade or cd /bin;./flowblade , https://github.com/jliljebl/flowblade/issues/857 .","title":"GUI apps"},{"location":"SYSTEMS/#xfce4-rdp","text":"xfce4 can also be made available with sudo apt update sudo apt install xorg sudo apt install xfce4 echo xfce4-session > ~/.xsession xfce4-session & or work with xrdp: sudo apt install xrdp sudo cp /etc/xrdp/xrdp.ini /etc/xrdp/xrdp.ini.bak sudo sed -i 's/port=3389/port=3390/' /etc/xrdp/xrdp.ini sudo /etc/init.d/xrdp restart One can add line sudo service xrdp start to ~/.bashrc . Moreover from a start up directory such as %UserProfile%, create a file named .wslconfig with lines such as processors=4 memory=4GB swap=4GB localhostForwarding=true The remote desktop can be started from DOS Prompt mstsc for localhost:3390 (127.0.0.1:3390). Programs such as FireFox can be started. To avoid running the service from every terminal session, amend the service xrdp as follows, export plus=$(service --status-all 2>&1 | grep xrdp | cut -d' ' -f3) if [[ \"$plus\" != \"+\" ]]; then echo $plus sudo service xrdp start fi Earlier note: We use check IPv4 address from Windows as follows with systeminfo (or ipconfig ) and start rdp to :3390. To avoid a dark screen, select Applications --> Settings --> Light Locker Settings --> Automatically lock the screen --> Never , Apply . One may also execute wsl --shutdown to reninitialize.","title":"xfce4 & rdp"},{"location":"SYSTEMS/#compression-of-disk","text":"wsl --shutdown diskpart select vdist file=\"D:\\wsl\\Ubuntu-2204\\ext4.vhdx\" compact vdisk echo \"%LocalAppData%\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\\\LocalState\\ext4.vhdx\" select vdisk file=\"C:\\Users\\User\\AppData\\Local\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\LocalState\\ext4.vhdx\" compact vdisk","title":"Compression of disk"},{"location":"SYSTEMS/#downloading-specific-distributions","text":"The list is available from https://learn.microsoft.com/en-us/windows/wsl/install-manual but alternatives are also possible. 
# Ubuntu 21.10 wget https://cloud-images.ubuntu.com/releases/impish/release/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz mkdir d:\\wsl wsl --import Ubuntu-2110 d:/WSL/Ubuntu-2110 d:/Downloads/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz wsl -d Ubuntu-2110 adduser jhz22 usermod -aG sudo jhz22 su jhz22 ln -s /mnt/c C ln -s /mnt/d D ln -s /mnt/f F # restart wsl -d Ubuntu-2110 -u jhz22 # Ubuntu 22.04 # https://cloud-images.ubuntu.com/releases/22.04/release-20220923/ubuntu-22.04-server-cloudimg-arm64-wsl.rootfs.tar.gz","title":"Downloading specific distributions"},{"location":"SYSTEMS/#windows-applications","text":"To start firefox or Chrome, here is a simple way, cmd.exe /c start https://github.com echo cmd.exe /c start https://github.com > ${HOME}/bin/edge chmod +x ${HOME}/bin/edge edge It is possible with default applications under Windows, e.g., cmd.exe /c u:/work/eQTL-MR.pptx which opens up PowerPoint directly. One can actually generalize these, e.g., ln -s $HOME/C/Program\\ Files\\ \\(x86\\)/Adobe/Acrobat\\ Reader\\ DC/Reader/AcroRd32.exe /home/$USER/bin/AcroRd32.exe ln -s $HOME/bin/AcroRd32.exe /home/$USER/bin/xpdf followed by a call to AcroRd32.exe and as xpdf , or directly call a list of programs: calc.exe , comp.exe , control.exe , curl.exe , fc.exe , find.exe , finger.exe , mspaint.exe , net.exe , sort.exe , tar.exe , whoami.exe , write.exe , xcopy.exe .","title":"Windows applications"},{"location":"SYSTEMS/#uninstallation","text":"This is achieved by # via wsl wsl --unregister Ubuntu-20.04 # via wslconfig wslconfig /u Ubuntu-20.04","title":"Uninstallation"},{"location":"SYSTEMS/#anaconda","text":"Once installed, it is customary to make several channels accessible, conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda Package in conda-forge include boost, django, glpk, gnuplot, go, gperf, hdf5, ipython, jquery, julia, jupyter, keras, limix, mercurial, miktex, mysql, nano, numpy, pandas, sage, scikit-learn, zlib. Packages in bioconda includes amos, bcftools, beagle, bedops, bedtools, blast, bowtie, bowtie2, bwa, chromhmm, circos, deeptools, emmix, ensembl-vep, fastlmm, fastqc, gatk, gatk4, hclust2, himmer, himmer2, hisat2, igv, impute2, lofreq, mapsplice, mrbayes, ms, nanostat, paml, pbgzip, phylip, picard, plink, plink2, r-wgcna, rsem, rtg-tools, sambamba, samtools, seqkt, sequana, snpeff, snpsift, sra-tools, star, stringtie, tabix, tophat, ucsc-blat, ucsc-liftover, vcftools. For instance, to install intervaltree as required by depict, the following is sufficience, conda install intervaltree All the packages installed can be seen with conda list . To install java, run following command conda install -c anaconda openjdk Other installations include perl, R. Note that conda under Windows is in typically D:/ProgramData/Anaconda2/Library/bin. Altogether we really need to set path=%path%;D:/ProgramData/Anaconda2;D:/ProgramData/Anaconda2/Library/bin Miniconda is available from https://conda.io/miniconda.html . Installation from scratch, wget https://repo.anaconda.com/archive/Anaconda2-2019.03-Linux-ppc64le.sh sh Anaconda2-2019.03-Linux-ppc64le.sh # do not activate at startup conda config --set auto_activate_base false export PYTHONPATH=/scratch/jhz22/lib/python2.7/site-packages/ Examine .bashrc for changes. See https://docs.anaconda.com/anaconda/user-guide/getting-started/ to get started.","title":"Anaconda"},{"location":"SYSTEMS/#github","text":"See physalia for information. 
It has been suggested to display math using the following premium in a GitHub page, A current repository on GitHub is here, https://cambridge-ceu.github.io/GitHub-matters/ .","title":"GitHub"},{"location":"SYSTEMS/#mercurial","text":"This is associated with the familiar hg command as used for instance by qctool . It is the executable file for Mercurial source code management system, sudo apt install mercurial","title":"mercurial"},{"location":"SYSTEMS/#libraoffice","text":"sudo add-apt-repository ppa:libreoffice/ppa sudo apt-get update sudo apt-get install libreoffice","title":"libraOffice"},{"location":"SYSTEMS/#linuxbrew","text":"Follow http://linuxbrew.sh/ and possibly https://docs.brew.sh sudo apt-get install build-essential sh -c \"$(curl -fsSL https://raw.githubusercontent.com/Linuxbrew/install/master/install.sh)\" echo 'export PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"' >>~/.profile echo 'export MANPATH=\"/home/linuxbrew/.linuxbrew/share/man:$MANPATH\"' >>~/.profile echo 'export INFOPATH=\"/home/linuxbrew/.linuxbrew/share/info:$INFOPATH\"' >>~/.profile PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"","title":"Linuxbrew"},{"location":"SYSTEMS/#aws","text":"http://aws.amazon.com/ The Open Guide to Amazon Web Services . E.g., https://sites.google.com/site/jpopgen/wgsa/create-an-aws-account https://sites.google.com/site/jpopgen/wgsa/launch-an-instance https://sites.google.com/site/jpopgen/wgsa/terminate-an-instance","title":"AWS"},{"location":"SYSTEMS/#modules","text":"It is a system that allows you to easily change between different versions of compilers and other software. Here explains how to set up globally, # https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz xz -d modules_5.2.0.orig.tar.xz tar xvf modules_5.2.0.orig.tar cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list Modification can be seen, e.g., function module () { curl -sf -XPOST http://modules-mon.hpc.cam.ac.uk/action -H 'Content-Type: application/json' -d '{ \"username\":\"'$USER'\", \"hostname\":\"'$HOSTNAME'\", \"command\":\"'\"$*\"'\" }' 2>&1 > /dev/null; eval `/usr/bin/modulecmd bash $*` } module load matlab/r2014a matlab $@ Usually the eval line is sufficient.","title":"modules"},{"location":"SYSTEMS/#docker","text":"See https://www.docker.com/ and https://docs.docker.com/ # https://docs.docker.com/engine/install/ubuntu/ for pkg in docker.io docker-doc docker-compose docker-compose-v2 podman-docker containerd runc; do sudo apt-get remove $pkg; done # Add Docker's official GPG key: sudo apt-get update sudo apt-get install ca-certificates curl sudo install -m 0755 -d /etc/apt/keyrings sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc sudo chmod a+r /etc/apt/keyrings/docker.asc # Add the repository to Apt sources: echo \\ \"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \\ $(. 
/etc/os-release && echo \"$VERSION_CODENAME\") stable\" | \\ sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin For instance, git clone https://docs.docker.com/engine/install/ubuntu/ docker run --rm -it neoaggelos/knotify /knotify/bin/rna_analysis --sequence AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC giving CCCAAACCGUAACCCC Sequence: AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC Structure: ..............((((((.....[[[))))))....]]]...... Energy: -13.800000190734863 Duration: 0.297946 s By default, Docker is only accessible with root privileges (sudo). As a regular user, add your user to the docker group. sudo addgroup --system docker sudo adduser $USER docker newgrp docker sudo snap disable docker sudo snap enable docker For Fedora 33, we have udo dnf -y install dnf-plugins-core sudo dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo sudo dnf install docker-ce docker-ce-cli containerd.io sudo systemctl start docker sudo docker pull eqtlcatalogue/susie-finemapping:v20.08.1 sudo docker run eqtlcatalogue/susie-finemapping:v20.08.1 # remove docker engine # sudo dnf remove docker-ce docker-ce-cli containerd.io # removed all images, containers, and volumes sudo rm -rf /var/lib/docker","title":"docker"},{"location":"SYSTEMS/#openvpn","text":"See https://github.com/OpenVPN/openvpn-gui . Usage example: sudo openvpn --config myconfig.vopn","title":"OpenVPN"},{"location":"SYSTEMS/#time-zone","text":"An example under Fedora, timedatectl list_timezones timedatectl set-timezone Europe/London which is useful to synchronise with files obtained from elsewhere.","title":"Time Zone"},{"location":"SYSTEMS/#web-linux-ssh-terminal","text":"","title":"Web-Linux ssh terminal"},{"location":"SYSTEMS/#shellinabox","text":"sudo apt update sudo apt-get install shellinabox sudo systemctl enable shellinaboxd # restart/start/stop sudo service shellinabox restart sudo systemctl start shellinaboxd sudo systemctl restart shellinaboxd sudo systemctl stop shellinaboxd # alternatives which work sudo /etc/init.d/shellinabox start sudo /etc/init.d/shellinabox stop /etc/init.d/shellinabox status so firefox https://127.0.0.1:4200 & allows for login from firefox.","title":"ShellinaBox"},{"location":"SYSTEMS/#cpolar","text":"Web: https://www.cpolar.com ( https://dashboard.cpolar.com/signup , https://dashboard.cpolar.com/login , https://dashboard.cpolar.com/get-started ) sudo apt install curl curl -L https://www.cpolar.com/static/downloads/install-release-cpolar.sh | sudo bash cpolar sudo systemctl enable cpolar sudo systemctl start cpolar firefox localhost:9200 &","title":"cpolar"},{"location":"SYSTEMS/#featured-articles","text":"Don't like your Linux desktop? Here's how to install an alternative, https://www.zdnet.com/article/dont-like-your-linux-desktop-heres-how-to-install-an-alternative/ .","title":"Featured articles"},{"location":"Utilities/","text":"Web-related notes aria2 Web: https://aria2.github.io/ . The first example, aria2c -c -j10 -i ../urls.txt , specifies that to continue (-c) interrupted download, to use 10 concurrent threads (-j10) and use links in urls.txt . The second example uses - as input: echo https://download.decode.is/s3/download?token=68278faa-0b69-47a0-8fcb-5e7f4057004d&file=10023_32_VDR_VDR.txt.gz | aria2c -i - . 
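As a hedged follow-up to the aria2 examples, a URL list can be assembled and downloaded in batch with resumption enabled; the URLs below are placeholders.

```bash
# build a download list and fetch it with 10 concurrent downloads, resuming on restart
cat > urls.txt <<'EOF'
https://example.org/data/file1.txt.gz
https://example.org/data/file2.txt.gz
EOF
aria2c -c -j10 -i urls.txt
```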
wget Some useful instances are as follows, wget -nd --execute=\"robots = off\" --mirror --convert-links --no-parent --wait=5 http://ftp.ebi.ac.uk/pub/databases/spot/eQTL/csv/GTEx_V8/ge/ wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz.tbi To keep the original timestamp, use -S option. It could be the appropriate way to download directory from a GitHub repository, e.g., git clone https://github.com/statgen/locuszoom.git for https://github.com/statgen/locuszoom/tree/develop/examples/data . lftp This is a sophisticated file transfer program. Non-interactive authentication for both ftp and sftp can be enabled by lftp , which can be installed with # Fedora sudo dnf install lftp # Ubuntu sudo apt install lftp Note in both cases command delimiters are required. Usage example: lftp -c mirror https://ftp.ebi.ac.uk/pub/databases/gwas/summary_statistics/GCST90086001-GCST90087000/ . The options of mirror can be seen with help mirror inside lftp; once done type quit to exit. --- ftp --- We use GENCODE, https://www.gencodegenes.org/human/ ( https://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/ ), to illustrate, The following code is used to download release_43. #!/usr/bin/bash HOST=ftp.ebi.ac.uk USER=anonymous PASS=s@cam.ac.uk FTPURL=ftp://$USER:$PASS@$HOST LCD=. RCD=/pub/databases/gencode/Gencode_human/release_43 lftp $HOST <% set_layout(\"hello\", \"Dash\") app %>% set_layout(div(\"hello\"), \"Dash\") app %>% set_layout(list(div(\"hello\"), \"Dash\")) app %>% set_layout(\"Conditional UI using an if statement: \", if (TRUE) \"rendered\", if (FALSE) \"not rendered\") app %>% set_layout(function() { div(\"Current time: \", Sys.time()) }) app A plotly figure (e.g., https://plotly-r.com/ can be passed on as follows, library(plotly) fig <- plot_ly() # fig <- fig %>% add_trace( ... ) # fig <- fig %>% layout( ... ) library(dash) library(dashCoreComponents) library(dashHtmlComponents) app <- Dash$new() app$layout( htmlDiv( list( dccGraph(figure=fig) ) ) ) app$run_server(debug=TRUE, dev_tools_hot_reload=FALSE) The view requires WebGL, see https://get.webgl.org . djvulibre wget https://github.com/barak/djvulibre/archive/debian/3.5.27.1-14.zip unzip 3.5.27.1-14.zip |more cd djvulibre-debian-3.5.27.1-14/ ./autogen.sh configure --prefix=/rds-d4/user/jhz22/hpc-work/ make make install Google-chrome Installation is possible with wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb sudo dpkg -i google-chrome-stable_current_amd64.deb then the Google repository is also added. Mobile tethering See information from here, https://ee.co.uk/help/help-new/network-and-coverage/tethering-or-sharing-internet/how-do-i-share-my-devices-data-connection-through-a-personal-hotspot-or-tethering . locale This is an example to convert from French encoding, #!/usr/bin/bash if [ ! 
-d ascii ]; then mkdir ascii; fi cd ascii echo $LANG export LANG=en_US.utf8 iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Categorization.csv\" > \"Data Graph Categorization.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Coverage Text Books 1980-2016.csv\" > \"Data Graph Coverage Text Books 1980-2016.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Features.csv\" > \"Data Graph Features.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Data Analyses.R\" > \"R Code Data Analyses.R\" # needs comment on -prev_p[order(-prev_p[,3]),] iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Figures.R\" > \"R Code Figures.R\" R --no-save < \"R Code Data Analyses.R\" R --no-save < \"R Code Figures.R\" cd - as in the following paper, Kossmeier et al. Charting the landscape of graphical displays for meta-analysis and systematic reviews: a comprehensive review, taxonomy, and feature analysis. BMC Medical Research Methodology (2020) 20:26, https://doi.org/10.1186/s12874-020-0911-9 Perhaps a somewhat easier way to do is through RStudio's File --> Save with Encoding and choose UTF-8 . Google document and EndNote It is possible to insert citation from an EndNote library, download as RTF more preferably OpenDocument format, and recover the citations through Tools --> Format paper, e.g., https://libguides.jcu.edu.au/endnote/google-docs sphinx The sequence below follows, https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html. module load python/3.6 virtualenv --system-site-package venv source venv/bin/activate pip install sphinx mkdir docs cd docs sphinx-quickstart make html pip install recommonmark Synchronisation We can employ rsync to synchronise the working node to the web space, e.g., rsync -avrzP $HOME/public_html shell.srcf.net:/public/$HOME Note that it works equally well for backup of files locally. Web site file permission The following commands set read permission to a web site hosted at /public/$HOME/public_html . chmod -R +r /public/$HOME/public_html find /public/$HOME/public_html -type d -exec chmod +x {} \\;","title":"Utilties"},{"location":"Utilities/#web-related-notes","text":"","title":"Web-related notes"},{"location":"Utilities/#aria2","text":"Web: https://aria2.github.io/ . The first example, aria2c -c -j10 -i ../urls.txt , specifies that to continue (-c) interrupted download, to use 10 concurrent threads (-j10) and use links in urls.txt . The second example uses - as input: echo https://download.decode.is/s3/download?token=68278faa-0b69-47a0-8fcb-5e7f4057004d&file=10023_32_VDR_VDR.txt.gz | aria2c -i - .","title":"aria2"},{"location":"Utilities/#wget","text":"Some useful instances are as follows, wget -nd --execute=\"robots = off\" --mirror --convert-links --no-parent --wait=5 http://ftp.ebi.ac.uk/pub/databases/spot/eQTL/csv/GTEx_V8/ge/ wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz.tbi To keep the original timestamp, use -S option. It could be the appropriate way to download directory from a GitHub repository, e.g., git clone https://github.com/statgen/locuszoom.git for https://github.com/statgen/locuszoom/tree/develop/examples/data .","title":"wget"},{"location":"Utilities/#lftp","text":"This is a sophisticated file transfer program. 
Non-interactive authentication for both ftp and sftp can be enabled by lftp , which can be installed with # Fedora sudo dnf install lftp # Ubuntu sudo apt install lftp Note in both cases command delimiters are required. Usage example: lftp -c mirror https://ftp.ebi.ac.uk/pub/databases/gwas/summary_statistics/GCST90086001-GCST90087000/ . The options of mirror can be seen with help mirror inside lftp; once done type quit to exit.","title":"lftp"},{"location":"Utilities/#-ftp-","text":"We use GENCODE, https://www.gencodegenes.org/human/ ( https://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/ ), to illustrate, The following code is used to download release_43. #!/usr/bin/bash HOST=ftp.ebi.ac.uk USER=anonymous PASS=s@cam.ac.uk FTPURL=ftp://$USER:$PASS@$HOST LCD=. RCD=/pub/databases/gencode/Gencode_human/release_43 lftp $HOST <% set_layout(\"hello\", \"Dash\") app %>% set_layout(div(\"hello\"), \"Dash\") app %>% set_layout(list(div(\"hello\"), \"Dash\")) app %>% set_layout(\"Conditional UI using an if statement: \", if (TRUE) \"rendered\", if (FALSE) \"not rendered\") app %>% set_layout(function() { div(\"Current time: \", Sys.time()) }) app A plotly figure (e.g., https://plotly-r.com/ can be passed on as follows, library(plotly) fig <- plot_ly() # fig <- fig %>% add_trace( ... ) # fig <- fig %>% layout( ... ) library(dash) library(dashCoreComponents) library(dashHtmlComponents) app <- Dash$new() app$layout( htmlDiv( list( dccGraph(figure=fig) ) ) ) app$run_server(debug=TRUE, dev_tools_hot_reload=FALSE) The view requires WebGL, see https://get.webgl.org .","title":"--- R ---"},{"location":"Utilities/#djvulibre","text":"wget https://github.com/barak/djvulibre/archive/debian/3.5.27.1-14.zip unzip 3.5.27.1-14.zip |more cd djvulibre-debian-3.5.27.1-14/ ./autogen.sh configure --prefix=/rds-d4/user/jhz22/hpc-work/ make make install","title":"djvulibre"},{"location":"Utilities/#google-chrome","text":"Installation is possible with wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb sudo dpkg -i google-chrome-stable_current_amd64.deb then the Google repository is also added.","title":"Google-chrome"},{"location":"Utilities/#mobile-tethering","text":"See information from here, https://ee.co.uk/help/help-new/network-and-coverage/tethering-or-sharing-internet/how-do-i-share-my-devices-data-connection-through-a-personal-hotspot-or-tethering .","title":"Mobile tethering"},{"location":"Utilities/#locale","text":"This is an example to convert from French encoding, #!/usr/bin/bash if [ ! -d ascii ]; then mkdir ascii; fi cd ascii echo $LANG export LANG=en_US.utf8 iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Categorization.csv\" > \"Data Graph Categorization.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Coverage Text Books 1980-2016.csv\" > \"Data Graph Coverage Text Books 1980-2016.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Features.csv\" > \"Data Graph Features.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Data Analyses.R\" > \"R Code Data Analyses.R\" # needs comment on -prev_p[order(-prev_p[,3]),] iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Figures.R\" > \"R Code Figures.R\" R --no-save < \"R Code Data Analyses.R\" R --no-save < \"R Code Figures.R\" cd - as in the following paper, Kossmeier et al. Charting the landscape of graphical displays for meta-analysis and systematic reviews: a comprehensive review, taxonomy, and feature analysis. 
BMC Medical Research Methodology (2020) 20:26, https://doi.org/10.1186/s12874-020-0911-9 Perhaps a somewhat easier way to do is through RStudio's File --> Save with Encoding and choose UTF-8 .","title":"locale"},{"location":"Utilities/#google-document-and-endnote","text":"It is possible to insert citation from an EndNote library, download as RTF more preferably OpenDocument format, and recover the citations through Tools --> Format paper, e.g., https://libguides.jcu.edu.au/endnote/google-docs","title":"Google document and EndNote"},{"location":"Utilities/#sphinx","text":"The sequence below follows, https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html. module load python/3.6 virtualenv --system-site-package venv source venv/bin/activate pip install sphinx mkdir docs cd docs sphinx-quickstart make html pip install recommonmark","title":"sphinx"},{"location":"Utilities/#synchronisation","text":"We can employ rsync to synchronise the working node to the web space, e.g., rsync -avrzP $HOME/public_html shell.srcf.net:/public/$HOME Note that it works equally well for backup of files locally.","title":"Synchronisation"},{"location":"Utilities/#web-site-file-permission","text":"The following commands set read permission to a web site hosted at /public/$HOME/public_html . chmod -R +r /public/$HOME/public_html find /public/$HOME/public_html -type d -exec chmod +x {} \\;","title":"Web site file permission"},{"location":"Flask/","text":"ChatGPT under Flask As ususal, this is furnished with app.py and config.py with templates in its simplest form. Flask/ \u251c\u2500\u2500 app.py \u251c\u2500\u2500 config.py \u251c\u2500\u2500 README.md (this file) \u2514\u2500\u2500 templates \u2514\u2500\u2500 index.html which uses environment variable from export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) . By default, python app.py will enable http://127.0.0.1:5000 : $ python app.py * Serving Flask app 'app' * Debug mode: on WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. * Running on http://127.0.0.1:5000 Press CTRL+C to quit * Restarting with watchdog (inotify) * Debugger is active! * Debugger PIN: 711-120-470 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - Considerable coverage has been given on Flask, e.g., https://cambridge-ceu.github.io/GitHub-matters/Flask/ , and WSGI is touched upon here, https://cambridge-ceu.github.io/CEU-matters/Flask/ .","title":"Flask"},{"location":"Flask/#chatgpt-under-flask","text":"As ususal, this is furnished with app.py and config.py with templates in its simplest form. Flask/ \u251c\u2500\u2500 app.py \u251c\u2500\u2500 config.py \u251c\u2500\u2500 README.md (this file) \u2514\u2500\u2500 templates \u2514\u2500\u2500 index.html which uses environment variable from export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) . By default, python app.py will enable http://127.0.0.1:5000 : $ python app.py * Serving Flask app 'app' * Debug mode: on WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. * Running on http://127.0.0.1:5000 Press CTRL+C to quit * Restarting with watchdog (inotify) * Debugger is active! 
* Debugger PIN: 711-120-470 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - Considerable coverage has been given on Flask, e.g., https://cambridge-ceu.github.io/GitHub-matters/Flask/ , and WSGI is touched upon here, https://cambridge-ceu.github.io/CEU-matters/Flask/ .","title":"ChatGPT under Flask"}]} \ No newline at end of file +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"Computational Statistics Online resources for computational statistics The repository intends to make it easier to keep pace with releases of Linux systems and R, among others; select the menu items above for details. INSTALL . Installation and setup. LANGUAGES . Language-related contents. PARALLEL . Parallel computing. REPRODUCE . Reproducible research. SYSTEMS . System-related materials. WEB: AI-related sites , Computing resources , Flask , Utilities","title":""},{"location":"#computational-statistics","text":"Online resources for computational statistics The repository intends to make it easier to keep pace with releases of Linux systems and R, among others; select the menu items above for details. INSTALL . Installation and setup. LANGUAGES . Language-related contents. PARALLEL . Parallel computing. REPRODUCE . Reproducible research. SYSTEMS . System-related materials. WEB: AI-related sites , Computing resources , Flask , Utilities","title":"Computational Statistics"},{"location":"AI/","text":"AI-related sites AI Act, https://digital-strategy.ec.europa.eu/en/policies/regulatory-framework-ai Anthropic, https://www.anthropic.com/ Claude, https://claude.ai sonnet, https://www.anthropic.com/claude/sonnet GitHub, https://github.com/lm-sys LMSYS Org, https://lmsys.org/ Chatbot Arena, https://lmarena.ai/ ChatPDF, https://www.chatpdf.com/ Crayon, https://www.craiyon.com/ fast.ai, https://github.com/fastai Google, https://www.google.com/ Colab, https://colab.research.google.com/ DeepMind, https://deepmind.google/ Gemini, https://gemini.google.com/ Hugging Face, https://huggingface.co Datasets, https://huggingface.co/datasets HuggingChat, https://huggingface.co/chat/ Llama-3.3-70B-Instruct, https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct Models, https://huggingface.co/models Kimi chat, https://kimi.moonshot.cn/ (PDF processing) Meta, https://www.facebook.com/business Llama, https://www.llama.com/ ( terms ) Meta AI, https://www.meta.ai/ Nougat, https://facebookresearch.github.io/nougat/ MicroSoft Bing Image Creator, https://www.bing.com/images/create/ Midjourney, https://www.midjourney.com/home No-Code LLM App Builders Dify, https://dify.ai/ Flowwise AI, https://flowiseai.com/ LangFlow, https://www.langflow.org/ OpenAI, https://openai.com/ ChatGPT, https://chatgpt.com/ CLIP, https://openai.com/index/clip/ DALL\u00b7E 3, https://openai.com/index/dall-e-3/ GitHub, https://github.com/openai sora, https://openai.com/index/sora/ Open-source LLMs DeepEval (>> Humanloop), https://github.com/confident-ai/deepeval Llama-3.1 (>> Open AI GPT-4), https://huggingface.co/meta-llama/Llama-3.1-8B LangChain (>> OpenAI Assistants), https://github.com/langchain-ai/langchain Flowwise (>> Relevance AI), https://github.com/FlowiseAI/Flowise Lite LLM (>> Martian AI ), https://github.com/BerriAI/litellm PaddlePaddle, https://github.com/PaddlePaddle Perplexity AI, https://www.perplexity.ai/ (search) Reka, https://chat.reka.ai/ Stability AI, https://stability.ai/ Stable 
diffusion, https://github.com/CompVis","title":"AI-related sites"},{"location":"AI/#ai-related-sites","text":"AI Act, https://digital-strategy.ec.europa.eu/en/policies/regulatory-framework-ai Anthropic, https://www.anthropic.com/ Claude, https://claude.ai sonnet, https://www.anthropic.com/claude/sonnet GitHub, https://github.com/lm-sys LMSYS Org, https://lmsys.org/ Chatbot Arena, https://lmarena.ai/ ChatPDF, https://www.chatpdf.com/ Crayon, https://www.craiyon.com/ fast.ai, https://github.com/fastai Google, https://www.google.com/ Colab, https://colab.research.google.com/ DeepMind, https://deepmind.google/ Gemini, https://gemini.google.com/ Hugging Face, https://huggingface.co Datasets, https://huggingface.co/datasets HuggingChat, https://huggingface.co/chat/ Llama-3.3-70B-Instruct, https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct Models, https://huggingface.co/models Kimi chat, https://kimi.moonshot.cn/ (PDF processing) Meta, https://www.facebook.com/business Llama, https://www.llama.com/ ( terms ) Meta AI, https://www.meta.ai/ Nougat, https://facebookresearch.github.io/nougat/ MicroSoft Bing Image Creator, https://www.bing.com/images/create/ Midjourney, https://www.midjourney.com/home No-Code LLM App Builders Dify, https://dify.ai/ Flowwise AI, https://flowiseai.com/ LangFlow, https://www.langflow.org/ OpenAI, https://openai.com/ ChatGPT, https://chatgpt.com/ CLIP, https://openai.com/index/clip/ DALL\u00b7E 3, https://openai.com/index/dall-e-3/ GitHub, https://github.com/openai sora, https://openai.com/index/sora/ Open-source LLMs DeepEval (>> Humanloop), https://github.com/confident-ai/deepeval Llama-3.1 (>> Open AI GPT-4), https://huggingface.co/meta-llama/Llama-3.1-8B LangChain (>> OpenAI Assistants), https://github.com/langchain-ai/langchain Flowwise (>> Relevance AI), https://github.com/FlowiseAI/Flowise Lite LLM (>> Martian AI ), https://github.com/BerriAI/litellm PaddlePaddle, https://github.com/PaddlePaddle Perplexity AI, https://www.perplexity.ai/ (search) Reka, https://chat.reka.ai/ Stability AI, https://stability.ai/ Stable diffusion, https://github.com/CompVis","title":"AI-related sites"},{"location":"Computing/","text":"Computing resources Online publications Cambridge English Dictionary, https://dictionary.cambridge.org/dictionary/ Free literature for your device, https://www.planetebook.com/ Handbook of Statistics, https://www.sciencedirect.com/handbook/handbook-of-statistics Merriam-Webster dictionary, https://www.merriam-webster.com/ ManyBooks, https://manybooks.net/ Springer Link, https://link.springer.com/ The book of statistical proofs, https://statproofbook.github.io/ Reference management AuthorArranger, https://authorarranger.nci.nih.gov/ citavi, https://www.citavi.com/ EndNote, https://support.clarivate.com/Endnote/s/?language=en_US ( MyEndNoteWeb ) JabRef, https://www.jabref.org/ Lead2Amazon, https://lead.to/amazon/uk/ Mendeley, https://www.mendeley.com/ Papers, https://www.papersapp.com/ ProQuest, https://about.proquest.com/en/ ( https://refworks.proquest.com/ ) PubMed, https://pubmed.ncbi.nlm.nih.gov/ Qiqqa, https://github.com/jimmejardine/qiqqa-open-source Sciwheel, get started ( Google Docs add-on , MicroSoft Word add-on ) Web of science, https://www.webofscience.com/wos/woscc/basic-search Zotero, https://www.zotero.org/ ( https://zbib.org/ ) Organisations Apache, https://httpd.apache.org/ APKmirror, https://www.apkmirror.com/ ARIA, https://www.aria.org.uk/ ( https://substack.com/@ariaresearch ) CRA, https://cra.org/ CRAN, https://cran.r-project.org/ 
CS rankings, https://csrankings.org/ CyberChef, https://gchq.github.io/CyberChef/ Google, https://www.google.co.uk/ Adwords keywords planner, https://ads.google.com/home/tools/keyword-planner/ App Engine, https://cloud.google.com/appengine/ Check My Links DeepMind, https://deepmind.google/ Gemini, https://ai.google.dev/ My groups, https://groups.google.com/my-groups Search console, https://search.google.com/search-console/about Translate, https://translate.google.co.uk/ MicroSoft, https://www.microsoft.com/ Account, https://account.live.com/consent/Manage?uaid=00be9fb584174427b1cffb995d934b24 Bing webmaster tool, https://www.bing.com/webmasters/about Bing image creator, https://www.bing.com/images/create/ dotnet, https://dotnet.microsoft.com/en-us/ Forms GatesNotes, https://www.gatesnotes.com/ Hotmail, https://outlook.live.com/owa/ LINQpad, https://www.linqpad.net/ Support, https://support.microsoft.com/en-us Sway, https://sway.office.com/ Windows 11 virtual machines, https://developer.microsoft.com/en-us/windows/downloads/virtual-machines/ Mozilla, https://www.mozilla.org/en-GB/about/ MySQL, https://dev.mysql.com/ Paper with code, https://paperswithcode.com/ Raspberry Pi OS, https://www.raspberrypi.com/software/ Science Media Centre, https://www.sciencemediacentre.org/ Twitter, https://github.com/twitter zenodo, https://zenodo.org/ Western Digital, https://www.westerndigital.com/ Linux sites Entroware, https://www.entroware.com/store/index.php Fedora, https://getfedora.org/ GNU parallel, https://www.biostars.org/p/63816/ linuxconfig.org, https://linuxconfig.org/ Linux command, https://wangchujiang.com/linux-command/ mythic-beasts, https://www.mythic-beasts.com/ OBI download, https://www.oracle.com/middleware/technologies/business-intelligence-v12213-downloads.html SRCF, https://www.srcf.net/ (site, https://jhz22.user.srcf.net/ ) VirtualBox, https://www.virtualbox.org/ (all downloads, https://download.virtualbox.org/virtualbox/ ) Online documentation/software/utilities Adobe online, https://www.adobe.com/uk/acrobat/online.html Aiseesoft background remover, https://www.aiseesoft.com/online-bg-remover/ apexCharts.js, https://apexcharts.com/ Avalonia Visual Basic6, https://github.com/BAndysc/AvaloniaVisualBasic6 balenaEtcher, https://etcher.balena.io/ Brave, https://brave.com/ brms, https://paul-buerkner.github.io/brms/index.html ( CRAN ) Calibre, https://calibre-ebook.com/ Click, https://click.palletsprojects.com ConvertWizard, https://convertwizard.com/ Dillinger, https://dillinger.io/ Django 5.0 documentation, https://docs.djangoproject.com/en/5.0/ DjVuLibre, http://djvu.sourceforge.net/ djvu.org, http://djvu.org EPUB reader online, https://epub-reader.online/ Equation Editor, https://editor.codecogs.com/ Flapjax, https://www.flapjax-lang.org/ Flask, https://flask.palletsprojects.com/ Foxit, https://www.foxitsoftware.com/ FreeCAD, https://www.freecad.org/ gravis, https://robert-haas.github.io/gravis-docs/index.html HivisionIDPhotos, https://swanhub.co/ZeYiLin/HivisionIDPhotos ( GitHub Icecream PDF Editor, https://icecreamapps.com/PDF-Editor/ iLoveIMG, https://www.iloveimg.com/ iLovePDF, https://www.ilovepdf.com/ IMGonline.com.ua, https://www.imgonline.com.ua/eng/resize-image.php INBO tutorials, https://inbo.github.io/tutorials/ Jinja, https://jinja.palletsprojects.com/ jqplay, https://jqplay.org/ JSEditor, https://jseditor.io/ Krite, https://krita.org/en/ MConverter, https://mconverter.eu/ ( docx2html ) Mermaid, https://mermaid.js.org/ (live editor, https://mermaid.live/ ) MobaXterm, 
https://mobaxterm.mobatek.net/ nbviewer, https://nbviewer.org/ OddPrints, https://www.oddprints.com/ Oh My Posh, https://ohmyposh.dev/ Online2PDF, https://online2pdf.com/docx2pdf PaddleOCR, https://github.com/PaddlePaddle/PaddleOCR PDF24, https://en.pdf24.org/ PDF2DJVU, https://pdf2djvu.com/ PDFgear, https://www.pdfgear.com/ PDF-XCHANGE, https://www.pdf-xchange.com/ (discontinued viewer ) PNG2PDF, https://png2pdf.com/ PhotoScissors, https://photoscissors.com/ QGIS, https://www.qgis.org/ Quarto, https://quarto.org/ RStudio, https://posit.co/ ( https://www.rstudio.com/ ) download [Mastering Shiny](https://mastering-shiny.org/ Positron shinyapps Rufus, https://rufus.ie/en/ Scribus, https://sourceforge.net/projects/scribus/ Speedtest, https://www.speedtest.net/ Template-Rcpp, https://github.com/stsds/Template-Rcpp (LinkedIn post ) text-utils, https://www.text-utils.com/ time.is, https://time.is/ Tabby, https://tabby.sh Typora, https://typora.io/ uLisp, http://www.ulisp.com/ WebGL, https://get.webgl.org/ WinToHDD, https://www.easyuefi.com/wintohdd/index.html (also easyUEFI ) WSGI, https://wsgi.readthedocs.io/ Yaak, https://yaak.app/ yozosoft, https://www.yozosoft.com/ zotero-gpt, https://github.com/MuiseDestiny/zotero-gpt","title":"Computing resources"},{"location":"Computing/#computing-resources","text":"","title":"Computing resources"},{"location":"Computing/#online-publications","text":"Cambridge English Dictionary, https://dictionary.cambridge.org/dictionary/ Free literature for your device, https://www.planetebook.com/ Handbook of Statistics, https://www.sciencedirect.com/handbook/handbook-of-statistics Merriam-Webster dictionary, https://www.merriam-webster.com/ ManyBooks, https://manybooks.net/ Springer Link, https://link.springer.com/ The book of statistical proofs, https://statproofbook.github.io/","title":"Online publications"},{"location":"Computing/#reference-management","text":"AuthorArranger, https://authorarranger.nci.nih.gov/ citavi, https://www.citavi.com/ EndNote, https://support.clarivate.com/Endnote/s/?language=en_US ( MyEndNoteWeb ) JabRef, https://www.jabref.org/ Lead2Amazon, https://lead.to/amazon/uk/ Mendeley, https://www.mendeley.com/ Papers, https://www.papersapp.com/ ProQuest, https://about.proquest.com/en/ ( https://refworks.proquest.com/ ) PubMed, https://pubmed.ncbi.nlm.nih.gov/ Qiqqa, https://github.com/jimmejardine/qiqqa-open-source Sciwheel, get started ( Google Docs add-on , MicroSoft Word add-on ) Web of science, https://www.webofscience.com/wos/woscc/basic-search Zotero, https://www.zotero.org/ ( https://zbib.org/ )","title":"Reference management"},{"location":"Computing/#organisations","text":"Apache, https://httpd.apache.org/ APKmirror, https://www.apkmirror.com/ ARIA, https://www.aria.org.uk/ ( https://substack.com/@ariaresearch ) CRA, https://cra.org/ CRAN, https://cran.r-project.org/ CS rankings, https://csrankings.org/ CyberChef, https://gchq.github.io/CyberChef/ Google, https://www.google.co.uk/ Adwords keywords planner, https://ads.google.com/home/tools/keyword-planner/ App Engine, https://cloud.google.com/appengine/ Check My Links DeepMind, https://deepmind.google/ Gemini, https://ai.google.dev/ My groups, https://groups.google.com/my-groups Search console, https://search.google.com/search-console/about Translate, https://translate.google.co.uk/ MicroSoft, https://www.microsoft.com/ Account, https://account.live.com/consent/Manage?uaid=00be9fb584174427b1cffb995d934b24 Bing webmaster tool, https://www.bing.com/webmasters/about Bing image creator, 
https://www.bing.com/images/create/ dotnet, https://dotnet.microsoft.com/en-us/ Forms GatesNotes, https://www.gatesnotes.com/ Hotmail, https://outlook.live.com/owa/ LINQpad, https://www.linqpad.net/ Support, https://support.microsoft.com/en-us Sway, https://sway.office.com/ Windows 11 virtual machines, https://developer.microsoft.com/en-us/windows/downloads/virtual-machines/ Mozilla, https://www.mozilla.org/en-GB/about/ MySQL, https://dev.mysql.com/ Paper with code, https://paperswithcode.com/ Raspberry Pi OS, https://www.raspberrypi.com/software/ Science Media Centre, https://www.sciencemediacentre.org/ Twitter, https://github.com/twitter zenodo, https://zenodo.org/ Western Digital, https://www.westerndigital.com/","title":"Organisations"},{"location":"Computing/#linux-sites","text":"Entroware, https://www.entroware.com/store/index.php Fedora, https://getfedora.org/ GNU parallel, https://www.biostars.org/p/63816/ linuxconfig.org, https://linuxconfig.org/ Linux command, https://wangchujiang.com/linux-command/ mythic-beasts, https://www.mythic-beasts.com/ OBI download, https://www.oracle.com/middleware/technologies/business-intelligence-v12213-downloads.html SRCF, https://www.srcf.net/ (site, https://jhz22.user.srcf.net/ ) VirtualBox, https://www.virtualbox.org/ (all downloads, https://download.virtualbox.org/virtualbox/ )","title":"Linux sites"},{"location":"Computing/#online-documentationsoftwareutilities","text":"Adobe online, https://www.adobe.com/uk/acrobat/online.html Aiseesoft background remover, https://www.aiseesoft.com/online-bg-remover/ apexCharts.js, https://apexcharts.com/ Avalonia Visual Basic6, https://github.com/BAndysc/AvaloniaVisualBasic6 balenaEtcher, https://etcher.balena.io/ Brave, https://brave.com/ brms, https://paul-buerkner.github.io/brms/index.html ( CRAN ) Calibre, https://calibre-ebook.com/ Click, https://click.palletsprojects.com ConvertWizard, https://convertwizard.com/ Dillinger, https://dillinger.io/ Django 5.0 documentation, https://docs.djangoproject.com/en/5.0/ DjVuLibre, http://djvu.sourceforge.net/ djvu.org, http://djvu.org EPUB reader online, https://epub-reader.online/ Equation Editor, https://editor.codecogs.com/ Flapjax, https://www.flapjax-lang.org/ Flask, https://flask.palletsprojects.com/ Foxit, https://www.foxitsoftware.com/ FreeCAD, https://www.freecad.org/ gravis, https://robert-haas.github.io/gravis-docs/index.html HivisionIDPhotos, https://swanhub.co/ZeYiLin/HivisionIDPhotos ( GitHub Icecream PDF Editor, https://icecreamapps.com/PDF-Editor/ iLoveIMG, https://www.iloveimg.com/ iLovePDF, https://www.ilovepdf.com/ IMGonline.com.ua, https://www.imgonline.com.ua/eng/resize-image.php INBO tutorials, https://inbo.github.io/tutorials/ Jinja, https://jinja.palletsprojects.com/ jqplay, https://jqplay.org/ JSEditor, https://jseditor.io/ Krite, https://krita.org/en/ MConverter, https://mconverter.eu/ ( docx2html ) Mermaid, https://mermaid.js.org/ (live editor, https://mermaid.live/ ) MobaXterm, https://mobaxterm.mobatek.net/ nbviewer, https://nbviewer.org/ OddPrints, https://www.oddprints.com/ Oh My Posh, https://ohmyposh.dev/ Online2PDF, https://online2pdf.com/docx2pdf PaddleOCR, https://github.com/PaddlePaddle/PaddleOCR PDF24, https://en.pdf24.org/ PDF2DJVU, https://pdf2djvu.com/ PDFgear, https://www.pdfgear.com/ PDF-XCHANGE, https://www.pdf-xchange.com/ (discontinued viewer ) PNG2PDF, https://png2pdf.com/ PhotoScissors, https://photoscissors.com/ QGIS, https://www.qgis.org/ Quarto, https://quarto.org/ RStudio, https://posit.co/ ( 
https://www.rstudio.com/ ) download [Mastering Shiny](https://mastering-shiny.org/ Positron shinyapps Rufus, https://rufus.ie/en/ Scribus, https://sourceforge.net/projects/scribus/ Speedtest, https://www.speedtest.net/ Template-Rcpp, https://github.com/stsds/Template-Rcpp (LinkedIn post ) text-utils, https://www.text-utils.com/ time.is, https://time.is/ Tabby, https://tabby.sh Typora, https://typora.io/ uLisp, http://www.ulisp.com/ WebGL, https://get.webgl.org/ WinToHDD, https://www.easyuefi.com/wintohdd/index.html (also easyUEFI ) WSGI, https://wsgi.readthedocs.io/ Yaak, https://yaak.app/ yozosoft, https://www.yozosoft.com/ zotero-gpt, https://github.com/MuiseDestiny/zotero-gpt","title":"Online documentation/software/utilities"},{"location":"INSTALL/","text":"Installation notes This section lists software which serve as backbone for a variety of projects including those in genetics. Illustration is given for some under Ubutun except R-devel which is with Fedora whose C/C++ version is higher. Environment modules Web: https://modules.readthedocs.io/en/latest/ It is preferable to allow for installation of multiple applications. The following scripts show how this is done under Ubunto. wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz tar xf modules_5.2.0.orig.tar.xz cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list According to https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ . Instances at work is shown here, https://cambridge-ceu.github.io/csd3/systems/ceuadmin.html . Armadillo It is available with sudo apt install libarmadillo-dev boost It is installed with sudo apt install libboost-all-dev To install it manually from source, as for a particular version, https://stackoverflow.com/questions/12578499/how-to-install-boost-on-ubuntu wget https://sourceforge.net/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz tar xvfz boost_1_58_0.tar.gz cd boost_1_58_0 # ./b2 -h gives more options ./bootstrap.sh --prefix=/scratch/jhz22 ./b2 With a successful built, the following directory is suggested to be added to compiler include paths: boost_1_58_0 The following directory should be added to linker library paths: boost_1_58_0/stage/lib and we can test with example #include #include using namespace std; int main(){ boost::array arr = {{1,2,3,4}}; cout << \"hi\" << arr[0]; return 0; } eigen It is installed with sudo apt install libeigen3-dev GMP/MPFR One can start usual from https://gmplib.org/ and https://www.mpfr.org/. sudo apt install libgmp-dev sudo apt install libmpfr-dev then one can install Rmpfr. When installing as non-Admin, make sure issuing 'make check' for both libraries. As MPFR is dependent on GMP, it is necessary to use cd /home/jhz22/Downloads/mpfr-4.0.1 ./configure --prefix=/scratch/jhz22 --with-gmp-build=/home/jhz22/Downloads/gmp-6.1.2 make check for instance. 
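When GMP/MPFR sit under a non-standard prefix as above, Rmpfr can be pointed at them explicitly; a minimal sketch, assuming /scratch/jhz22 as the prefix and a locally downloaded source tarball:

```bash
# install Rmpfr against GMP/MPFR in a custom prefix (paths illustrative)
export PKG_CONFIG_PATH=/scratch/jhz22/lib/pkgconfig
R CMD INSTALL Rmpfr_*.tar.gz \
  --configure-args='--with-mpfr-include=/scratch/jhz22/include --with-mpfr-lib=/scratch/jhz22/lib'
```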
GSL sudo apt install libgsl-dev JAGS-4.3.0 These are required at least under Federa 28, sudo dnf install automake sudo dnf install lapack-devel sudo dnf install mercurial It is actually available from Ubuntu archive, i.e., sudo apt install jags sudo apt-get install r-cran-rjags We can also work with sourceforge, wget https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.3.0.tar.gz tar xvfz JAGS-4.3.0.tar.gz cd JAGS-4.3.0 LDFLAGS=\"-L/scratch/jhz22/lib64\" ./configure --prefix=/scratch/jhz22 --with-blas=-lblas --with-lapack=-llapack make make install Under MKL, we have #22-7-2014 MRC-Epid JHZ export MKL_NUM_THREAD=15 export MKL=/home/jhz22/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/home/jhz22 --disable-shared --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKL/include -L$MKL/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make It turns out the easiest to install rjags package is to download it and work manually, e.g., R --no-save < Makevars cd ../.. R CMD INSTALL rjags The rjags package can also be installed as follows, export PKG_CONFIG_PATH=/scratch/jhz22/lib/pkgconfig R CMD INSTALL rjags_4-6.tar.gz --configure-args='CPPFLAGS=\"-fPIC\" LDFLAGS=\"-L/scratch/jhz22/lib -ljags\" --with-jags-prefix=/scratch/jhz22 --with-jags-libdir=/scratch/jhz22/lib --with-jags-includedir=/scratch/jhz22/include' It may still be difficult to install, and we can try manually, tar xfz rjags_4-6.tar.gz cd rjags mv configure configure.bak echo PKG_CPPFLAGS=-fPIC -I/scratch/$USER/include/JAGS > src/Makevars echo PKG_LIBS=-L/scratch/$USER/lib -ljags >> src/Makevars cd - R CMD INSTALL rjags After this, rjags should install as with R2jags. We can also install JAGS-related packages by establishing an Makevars in the src directory, e.g., R --no-save < #ifndef JAGS_MAJOR #define JAGS_MAJOR 4 #endif #define JAGS_MAJOR_FORCED 0 where the Makevars.runjags has the following lines PKG_CPPFLAGS=-I/scratch/jhz22/include PKG_LIBS=-L/scratch/jhz22/lib -ljags OBJECTS= distributions/jags/DFunction.o distributions/jags/DPQFunction.o distributions/jags/PFunction.o distributions/jags/QFunction.o distributions/jags/RScalarDist.o distributions/DPar1.o distributions/DPar2.o distributions/DPar3.o distributions/DPar4.o distributions/DLomax.o distributions/DMouchel.o distributions/DGenPar.o distributions/DHalfCauchy.o runjags.o testrunjags.o To get around these, one can mirror installation of rjags using the fact that runjags simply calls libjags.so though the source seemed for JAGS 3.x.x., export PKG_CONFIG_PATH=/rds-d4/user/jhz22/hpc-work/lib/pkgconfig export LDFLAGS=\"-L/rds-d4/user/jhz22/hpc-work/lib -ljags -lblas -llapack\" R CMD INSTALL runjags_2.0.4-2.tar.gz --configure-args=' --with-jags-prefix=/rds-d4/user/jhz22/hpc-work --with-jags-libdir=/rds-d4/user/jhz22/hpc-work/lib --with-jags-includedir=/rds-d4/user/jhz22/hpc-work/include' but somehow runjags is always points to lib64 for libjags.so, so when libjags.so is in lib instead it is necessary to create symbolic links from lib64. 
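A minimal sketch of those symbolic links, reusing the JAGS prefix from the examples above; the library names are globbed because the versioned file names depend on the JAGS release.

```bash
# expose the JAGS libraries under lib64 when runjags looks there rather than in lib
PREFIX=/rds-d4/user/jhz22/hpc-work
mkdir -p ${PREFIX}/lib64
ln -sf ${PREFIX}/lib/libjags.so* ${PREFIX}/lib64/
ln -sf ${PREFIX}/lib/libjrmath.so* ${PREFIX}/lib64/
```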
BLAS and LAPACK The pre-built version is straightforward for Fedora with sudo dnf install blas-devel sudo dnf install lapack-devel and the counterpart for Ubuntu is sudo apt install libblas-dev sudo apt install liblapack-dev To install from http://www.netlib.org/lapack/, we proceed as follows, wget http://www.netlib.org/lapack/lapack-3.8.0.tar.gz tar xvfz lapack-3.8.0.tar.gz cd lapack-3.8.0 mkdir build cd build ## ccmake . cmake .. make make install It is necessary to invoke ccmake .. to change the default static to dyanmic library as well as target directory. However, in case this is working, one can proceed as follows, cmake -DCMAKE_INSTALL_PREFIX=/rds-d4/user/jhz22/hpc-work -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=ON -DCBLAS=ON -DLAPACKE=ON .. make make install MKL One can consult Intel\u00ae Math Kernel Library Link Line Advisor and Free access to Intel\u00ae Compilers, Performance libraries, Analysis tools and more... . For instance, it is conviently available from Anaconda, conda install -c intel mkl Example use with R under RHEL, # export OMP_NUM_THREADS=6 export MKL_NUM_THREADS=15 export MKLROOT=/genetics/data/software/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/genetics/data/software --enable-R-shlib --enable-threads=posix --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKLROOT/include -L$MKLROOT/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make make install and # https://software.intel.com/en-us/articles/build-r-301-with-intel-c-compiler-and-intel-mkl-on-linux# export ICC_OPT=\"-mkl -xHOST -fp-model strict\" export CC=\"icc $ICC_OPT\" export CXX=\"icpc $ICC_OPT\" export FC=\"ifort -mkl -xHOST\" export F77=\"ifort -mkl -xHOST\" export FPICFLAGS=\" -fPIC\" export AR=xiar export LD=xild export MKL=\"-lmkl_gf_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread\" ./configure --prefix=/home/jhz22/R-devel --enable-R-shlib --with-x=no --with-blas=-lmkl LDFLAGS=-L/home/jhz22/lib CPPFLAGS=-I/home/jhz22/include For Windows, see https://software.intel.com/content/www/us/en/develop/documentation/get-started-with-mkl-for-windows/top.html. The benchmark is available from here, https://github.com/pachamaltese/r-with-intel-mkl/blob/master/00-benchmark-scripts/1-r-benchmark-25.R. cd \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" rename Rblas.dll Rblas.dll.orig rename Rlapack.dll Rlapack.dll.orig cd \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\redist\\intel64_win\\mkl\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rblas.dll\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rlapack.dll\" copy mkl_intel_thread.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" making this known the PATH. NLopt Available from https://nlopt.readthedocs.io/en/latest/ with R counterpart from https://cran.r-project.org/web/packages/nloptr/index.html. GNU Octave It is available with, sudo apt install octave PSPP Under Ubuntu, this can be made available with sudo apt install pspp For Fedora, we have sudo dnf install pspp which will install libpq, gsl, gtksourceview3, spread-sheet-widget as well, see https://apps.fedoraproject.org/packages/pspp. Two simple SPSS command files example.sps and plot.sps can be called with pspp example.sps psppire plot.sps showing CLI and GUI, respectively. Related utilities are pspp-convert . 
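As an illustration of pspp-convert, an SPSS system file can be turned into plain CSV; the file names are placeholders and the output format is taken from the extension.

```bash
# convert an SPSS .sav data file to CSV
pspp-convert example.sav example.csv
```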
It is possible to compile it directly by using gtksourceview 4.0.3 (4.4.0 is more demanding with Python 3.5, meson, Vala, etc.) and use PKG_CONFIG_PATH when appropriate spread-sheet-widget-0.3 fribidi-1.0.8 GTKSOURVIEW_CFLAGS and GTKSOURVIEW_LIBS in the configuration. export PREFIX=/rds/user/$USER/hpc-work export GTKSOURCEVIEW_CFLAGS=-I${PREFIX}/includegtksourceview-4 export GTKSOURCEVIEW_LIBS=\"-L${PREFIX}/lib -lgtksourceview-4\" ./configure --prefix=${PREFIX} make make install note that it is necessary to comment on the statement kludge = gtk_source_view_get_type (); from src/ui/gui/widgets.c and to remove the PREFIX= speficiation in the Perl part of compiling, i.e, cd perl-module /usr/bin/perl Makefile.PL PREFIX=/rds/user/$USER/hpc-work OPTIMIZE=\"-g -O2 -I/rds-d4/user/$USER/hpc-work/include/fribidi -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng15 -I/usr/include/uuid -I/usr/include/libdrm -I/usr/include/pango-1.0 -I/usr/include/harfbuzz \" A more recent description is here, https://cambridge-ceu.github.io/csd3/applications/pspp.html . python A useful resource is code from Pattern Recognition and Machine Learning . It is possible to conduct survival analysis with lifelines , pip install lifelines R Fedora 31 The R-release, including both the compiled and source package, is built as follows, sudo dnf install R sudo dnf install R-devel while the following are necessary to build the development version , sudo dnf install gcc-c++ sudo dnf install gcc-gfortran sudo dnf install pcre-devel sudo dnf install java-1.8.0-openjdk-devel sudo dnf install readline-devel sudo dnf install libcurl-devel sudo dnf install libX11-devel sudo dnf install libXt-devel sudo dnf install bzip2-devel sudo dnf install xz-devel sudo dnf install pandoc sudo dnf install qpdf sudo dnf install texlive-collection-latex sudo dnf install texlive-collection-fontsextra sudo dnf install texinfo-tex sudo dnf install texlive-collection-fontsrecommended sudo dnf install texlive-collection-latexrecommended ./configure This is necessary since gcc 9 is available and required for CRAN package submission , e.g., # R-release to build R CMD build gap # R-devel to check ln -s $HOME/R/R-devel/bin/R $HOME/bin/R-devel R-devel CMD check --as-cran gap_1.1-22.tar.gz For R-devel, these can be used explicitly, export CC=\"/usr/bin/gcc\" export CXX=\"/usr/bin/g++\" export FC=\"/usr/bin/gfortran\" export CFLAGS=\"-g -O2 -Wall -pedantic -mtune=native\" export FFLAGS=\"-g -O2 -mtune=native -Wall -pedantic\" export CXXFLAGS=\"-g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\" export LDFLAGS=\"-L/usr/lib64\" R-devel CMD INSTALL gap_1.2.tar.gz with check on foreign language calls or R-devel CMD INSTALL --configure-args=\" CC=\\\"/usr/bin/gcc\\\" \\ CXX=\\\"/usr/bin/g++\\\" \\ FC=\\\"/usr/bin/gfortran\\\" \\ CFLAGS=\\\"-g -O2 -Wall -pedantic -mtune=native\\\" \\ FFLAGS=\\\"-g -O2 -mtune=native -Wall -pedantic\\\" \\ CXXFLAGS=\\\"-I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\\\" \\ LDFLAGS=\\\"-L/usr/lib64\\\" gap_1.1-26.tar.gz ``` which is more restrictive than the default --as-cran above. 
A simpler setup is also possible with `~/.R/Makevars`, e.g., ```bash CC = gcc CXX = g++ CXX11 = g++ FC = gfortran F77 = gfortran F90 = gfortran CFLAGS = -std=c99 -I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses -Wimplicit-function-declaration CXXFLAGS = -std=c++11 Another example is as follows, module load texlive ./configure --prefix=/rds-d4/user/jhz22/hpc-work \\ --enable-R-shlib \\ CPPFLAGS=-I/rds-d4/user/jhz22/hpc-work/include \\ LDFLAGS=-L/rds-d4/user/jhz22/hpc-work/lib On Fedora 35, we see the following messages from R CMD check gap_1.2.3-6.tar.gz , Error(s) in re-building vignettes: ... --- re-building \u2018gap.Rmd\u2019 using rmarkdown Quitting from lines 273-279 (gap.Rmd) Error: processing vignette 'gap.Rmd' failed with diagnostics: X11 font -adobe-helvetica-%s-%s-*-*-%d-*-*-*-*-*-*-*, face 1 at size 5 could not be loaded --- failed re-building \u2018gap.Rmd\u2019 --- re-building \u2018shinygap.Rmd\u2019 using rmarkdown --- finished re-building \u2018shinygap.Rmd\u2019 --- re-building \u2018jss.Rnw\u2019 using Sweave --- finished re-building \u2018jss.Rnw\u2019 SUMMARY: processing the following file failed: \u2018gap.Rmd\u2019 Error: Vignette re-building failed. Execution halted * checking PDF version of manual ... OK * checking HTML version of manual ... NOTE Skipping checking HTML validation: no command 'tidy' found Skipping checking math rendering: package 'V8' unavailable * checking for non-standard things in the check directory ... OK * checking for detritus in the temp directory ... OK * DONE This is resolved by sudo dnf install v8-devel sudo dnf install xorg-x11-fonts* Rscript -e 'install.packages(c(\"shniy\",\"V8\"),repos=\"https://cran.r-project.org\")' Ubuntu 18.04 The R environment is furnished with sudo apt install r-base-core sudo apt install r-base-dev and R_LIBS is set from .bashrc export R_LIBS=/usr/local/lib/R/site-library/ Note that in fact html.start() in R points to /usr/local/lib/R/library/ instead, see below example in MendelianRandomization . To enable R-devel/package building, these are necessary sudo apt install g++ sudo apt install gfortran sudo apt install texlive sudo apt install texlive-fonts-extra sudo apt install texinfo sudo apt install texlive-fonts-recommended sudo apt install libreadline-dev To set up bzip2, lzma/pcre, curl and then R assuming lapack is already installed, # compile shared library Makefile-libbz2_so and then add -FPIC to CC and recompile # bzip2 # make # make install PREFIX=$SHOME # xz # ./configure --prefix=SHOME/xz-5.2.3 # make -j3 # make install # pcre # ./configure --prefix=$SHOME --enable-utf8 # curl # ./configure --prefix=$SHOME --with-ssl # make && make install ./configure --prefix=/scratch/jhz22 --enable-R-shlib CPPFLAGS=\"-I/scratch/jhz22/include\" LDFLAGS=\"-L/scratch/jhz22/lib\" Windows To build packages on Windows, download Rtools from https://cran.r-project.org/ and install to C:\\Rtools rem 22/8/2019 JHZ set path=C:\\Program Files\\R\\R-3.6.1\\bin;c:\\Rtools\\bin;%PATH%;c:\\Rtools\\mingw_64\\bin;c:\\Rtools\\mingw_32\\bin set lib=c:\\Rtools\\mingw_64\\lib;c:\\Rtools\\mingw_32\\include set include=c:\\Rtools\\mingw_64\\include;c:\\Rtools\\mingw_32\\include We can then run R CMD INSTALL --binary gap , say. It seems the --arch x84 option is very useful for using all available RAM; to make sure use call such as D:\\Program Files\\R\\R-3.5.0\\bin\\x64\\R.exe\" . When this fails, remove large objects in your code and start R with --vanilla option. 
To upgrade R, it is useful to install installr for its updateR() . Package installation CRAN . It is typically done with install.packages() install.packages(\"ggplot2\",INSTALL_opts=\"--library=/usr/local/lib/R/site-library/\") Bioconductor . This is done with biocLite . source(\"https://bioconductor.org/biocLite.R\") biocLite(\"packagename\") From R 3.5 or greater there is BiocManager, if (!requireNamespace(\"BiocManager\", quietly = TRUE)) install.packages(\"BiocManager\") BiocManager::install() See https://bioconductor.org/install/. Lastly, it is possible with devtools::install_bioc() . GitHub . We could set this up via sudo apt install r-cran-devtools . This is then through devtools::install_github() . library(devtools) install_github(\"MRCIEU/TwoSampleMR\",args=\"--library=/usr/local/lib/R/site-library\",force=TRUE) with dedicated location(s); however this is not always the case and an alternative is to use sudo R CMD INSTALL -l $R_LIBS to install into $R_LIBS. It is possible to point to a package, locally or remotely, e.g, install.packages(\"http://cnsgenomics.com/software/gsmr/static/gsmr_1.0.6.tar.gz\",repos=NULL,type=\"source\") whose first argument is a URL. Multiple precision arithmetic . This is modified from notes on SCALLOP-INF analysis. sudo apt install libmpfr-dev R --no-save < install.packages(\"plotly\") --- Please select a CRAN mirror for use in this session --- Error in structure(.External(.C_dotTclObjv, objv), class = \"tclObj\") : [tcl] bad pad value \"2m\": must be positive screen distance. but can be avoided with specificatino of repository. > install.packages(\"plotly\", repos=\"https://cran.r-project.org\") RStudio The distribution has problem loading or creating R script, so it is tempting to install from https://github.com/rstudio/rstudio/. This involves running scripts under directory dependencies/, ./install-dependencies-debian --exclude-qt-sdk and then the following steps, mkdir build cd build cmake .. -DRSTUDIO_TARGET=Desktop -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/lib/rstudio However, there is error with Java and Java 8 is required, see https://tecadmin.net/install-oracle-java-8-ubuntu-via-ppa/. sudo add-apt-repository ppa:webupd8team/java sudo apt-get update sudo apt-get install oracle-java8-installer sudo apt-get install oracle-java8-set-default java -version However, compile error is still persistent except when dropping the option --exclude-qt-sdk but unloadable. It is therefore recommended to get around with RStudio daily builds, https://dailies.rstudio.com/. SageMath sudo apt install sagemath stan cmdstan is now available from https://github.com/stan-dev/cmdstan along with other repositories there. Interfaces are listed at http://mc-stan.org/users/interfaces/index.html. Information on installing RStan is described here, https://github.com/stan-dev/rstan/wiki/Installing-RStan-on-Linux On our HPC system under gcc 4.8.5 there are error message > library(rstan) Loading required package: ggplot2 Registered S3 methods overwritten by 'ggplot2': method from [.quosures rlang c.quosures rlang print.quosures rlang Loading required package: StanHeaders Error: package or namespace load failed for \u2018rstan\u2019 in dyn.load(file, DLLpath = DLLpath, ...): unable to load shared object '/rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so': /usr/lib64/libstdc++.so.6: version `GLIBCXX_3.4.20' not found (required by /rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so) > q() which can be resolved with module load gcc/5.2.0 before invoking R. 
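With a suitable compiler on the path, a quick way to confirm the rstan toolchain end to end is to compile the example model bundled with the package (a sketch; the first run is slow because the model is compiled from scratch):

```r
# Compile and sample from the example model shipped with rstan
library(rstan)
example(stan_model, package = "rstan", run.dontrun = TRUE)
```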
For error message C++14 standard requested but CXX14 is not defined we modify $HOME/.R/Makevars as follows, CXX14 = g++ -std=c++1y -fPIC see https://github.com/stan-dev/rstan/issues/569 but adding -fPIC and as in unixOBD below. unixODBC It is quite standard to install, i.e., wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.7.tar.gz tar xvfz unixODBC-2.3.7.tar.gz cd unixODBC-2.3.7 ./configure --prefix=/scratch/jhz22 make make install There have been many discussions regarding \"C++11 standard requested but CXX11 is not defined\" and this could be fixed with changes to $R_HOME/etc/Makeconf such that CXX11 = g++ -std=c++11 -fPIC then module load gcc/5.2.0 R CMD INSTALL odbc This is necessary for gtx for instance. zlib Try sudo apt-get install libz-dev","title":"INSTALL"},{"location":"INSTALL/#installation-notes","text":"This section lists software which serve as backbone for a variety of projects including those in genetics. Illustration is given for some under Ubutun except R-devel which is with Fedora whose C/C++ version is higher.","title":"Installation notes"},{"location":"INSTALL/#environment-modules","text":"Web: https://modules.readthedocs.io/en/latest/ It is preferable to allow for installation of multiple applications. The following scripts show how this is done under Ubunto. wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz tar xf modules_5.2.0.orig.tar.xz cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list According to https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ . Instances at work is shown here, https://cambridge-ceu.github.io/csd3/systems/ceuadmin.html .","title":"Environment modules"},{"location":"INSTALL/#armadillo","text":"It is available with sudo apt install libarmadillo-dev","title":"Armadillo"},{"location":"INSTALL/#boost","text":"It is installed with sudo apt install libboost-all-dev To install it manually from source, as for a particular version, https://stackoverflow.com/questions/12578499/how-to-install-boost-on-ubuntu wget https://sourceforge.net/projects/boost/files/boost/1.58.0/boost_1_58_0.tar.gz tar xvfz boost_1_58_0.tar.gz cd boost_1_58_0 # ./b2 -h gives more options ./bootstrap.sh --prefix=/scratch/jhz22 ./b2 With a successful built, the following directory is suggested to be added to compiler include paths: boost_1_58_0 The following directory should be added to linker library paths: boost_1_58_0/stage/lib and we can test with example #include #include using namespace std; int main(){ boost::array arr = {{1,2,3,4}}; cout << \"hi\" << arr[0]; return 0; }","title":"boost"},{"location":"INSTALL/#eigen","text":"It is installed with sudo apt install libeigen3-dev","title":"eigen"},{"location":"INSTALL/#gmpmpfr","text":"One can start usual from https://gmplib.org/ and https://www.mpfr.org/. sudo apt install libgmp-dev sudo apt install libmpfr-dev then one can install Rmpfr. When installing as non-Admin, make sure issuing 'make check' for both libraries. 
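Once Rmpfr has been installed against these libraries, a short sanity check such as the one below (the values are purely illustrative) confirms that the GMP/MPFR headers and shared libraries are being picked up:

```r
# Exercise multiple-precision arithmetic via Rmpfr
library(Rmpfr)
Const("pi", prec = 200)        # pi at 200-bit precision
mpfr(2, precBits = 200)^0.5    # high-precision sqrt(2)
```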
As MPFR is dependent on GMP, it is necessary to use cd /home/jhz22/Downloads/mpfr-4.0.1 ./configure --prefix=/scratch/jhz22 --with-gmp-build=/home/jhz22/Downloads/gmp-6.1.2 make check for instance.","title":"GMP/MPFR"},{"location":"INSTALL/#gsl","text":"sudo apt install libgsl-dev","title":"GSL"},{"location":"INSTALL/#jags-430","text":"These are required at least under Federa 28, sudo dnf install automake sudo dnf install lapack-devel sudo dnf install mercurial It is actually available from Ubuntu archive, i.e., sudo apt install jags sudo apt-get install r-cran-rjags We can also work with sourceforge, wget https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.3.0.tar.gz tar xvfz JAGS-4.3.0.tar.gz cd JAGS-4.3.0 LDFLAGS=\"-L/scratch/jhz22/lib64\" ./configure --prefix=/scratch/jhz22 --with-blas=-lblas --with-lapack=-llapack make make install Under MKL, we have #22-7-2014 MRC-Epid JHZ export MKL_NUM_THREAD=15 export MKL=/home/jhz22/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/home/jhz22 --disable-shared --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKL/include -L$MKL/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make It turns out the easiest to install rjags package is to download it and work manually, e.g., R --no-save < Makevars cd ../.. R CMD INSTALL rjags The rjags package can also be installed as follows, export PKG_CONFIG_PATH=/scratch/jhz22/lib/pkgconfig R CMD INSTALL rjags_4-6.tar.gz --configure-args='CPPFLAGS=\"-fPIC\" LDFLAGS=\"-L/scratch/jhz22/lib -ljags\" --with-jags-prefix=/scratch/jhz22 --with-jags-libdir=/scratch/jhz22/lib --with-jags-includedir=/scratch/jhz22/include' It may still be difficult to install, and we can try manually, tar xfz rjags_4-6.tar.gz cd rjags mv configure configure.bak echo PKG_CPPFLAGS=-fPIC -I/scratch/$USER/include/JAGS > src/Makevars echo PKG_LIBS=-L/scratch/$USER/lib -ljags >> src/Makevars cd - R CMD INSTALL rjags After this, rjags should install as with R2jags. 
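A minimal rjags run (the model and data below are purely illustrative) is a convenient way to confirm that R is loading the intended libjags:

```r
# Smoke test: a one-parameter normal-mean model
library(rjags)
m <- jags.model(textConnection("model {
  for (i in 1:N) { x[i] ~ dnorm(mu, 1) }
  mu ~ dnorm(0, 1.0E-4)
}"), data = list(x = rnorm(20), N = 20), n.chains = 2)
update(m, 1000)                                # burn-in
summary(coda.samples(m, "mu", n.iter = 1000))  # posterior for mu
```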
We can also install JAGS-related packages by establishing an Makevars in the src directory, e.g., R --no-save < #ifndef JAGS_MAJOR #define JAGS_MAJOR 4 #endif #define JAGS_MAJOR_FORCED 0 where the Makevars.runjags has the following lines PKG_CPPFLAGS=-I/scratch/jhz22/include PKG_LIBS=-L/scratch/jhz22/lib -ljags OBJECTS= distributions/jags/DFunction.o distributions/jags/DPQFunction.o distributions/jags/PFunction.o distributions/jags/QFunction.o distributions/jags/RScalarDist.o distributions/DPar1.o distributions/DPar2.o distributions/DPar3.o distributions/DPar4.o distributions/DLomax.o distributions/DMouchel.o distributions/DGenPar.o distributions/DHalfCauchy.o runjags.o testrunjags.o To get around these, one can mirror installation of rjags using the fact that runjags simply calls libjags.so though the source seemed for JAGS 3.x.x., export PKG_CONFIG_PATH=/rds-d4/user/jhz22/hpc-work/lib/pkgconfig export LDFLAGS=\"-L/rds-d4/user/jhz22/hpc-work/lib -ljags -lblas -llapack\" R CMD INSTALL runjags_2.0.4-2.tar.gz --configure-args=' --with-jags-prefix=/rds-d4/user/jhz22/hpc-work --with-jags-libdir=/rds-d4/user/jhz22/hpc-work/lib --with-jags-includedir=/rds-d4/user/jhz22/hpc-work/include' but somehow runjags is always points to lib64 for libjags.so, so when libjags.so is in lib instead it is necessary to create symbolic links from lib64.","title":"JAGS-4.3.0"},{"location":"INSTALL/#blas-and-lapack","text":"The pre-built version is straightforward for Fedora with sudo dnf install blas-devel sudo dnf install lapack-devel and the counterpart for Ubuntu is sudo apt install libblas-dev sudo apt install liblapack-dev To install from http://www.netlib.org/lapack/, we proceed as follows, wget http://www.netlib.org/lapack/lapack-3.8.0.tar.gz tar xvfz lapack-3.8.0.tar.gz cd lapack-3.8.0 mkdir build cd build ## ccmake . cmake .. make make install It is necessary to invoke ccmake .. to change the default static to dyanmic library as well as target directory. However, in case this is working, one can proceed as follows, cmake -DCMAKE_INSTALL_PREFIX=/rds-d4/user/jhz22/hpc-work -DCMAKE_BUILD_TYPE=RELEASE -DBUILD_SHARED_LIBS=ON -DCBLAS=ON -DLAPACKE=ON .. make make install","title":"BLAS and LAPACK"},{"location":"INSTALL/#mkl","text":"One can consult Intel\u00ae Math Kernel Library Link Line Advisor and Free access to Intel\u00ae Compilers, Performance libraries, Analysis tools and more... . 
For instance, it is conviently available from Anaconda, conda install -c intel mkl Example use with R under RHEL, # export OMP_NUM_THREADS=6 export MKL_NUM_THREADS=15 export MKLROOT=/genetics/data/software/intel/composer_xe_2013.4.183/mkl export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$MKLROOT/lib/intel64 /genetics/data/software/intel/composer_xe_2013.4.183/mkl/bin/mklvars.sh intel64 ./configure --prefix=/genetics/data/software --enable-R-shlib --enable-threads=posix --with-lapack \\ --with-blas=\"-fopenmp -m64 -I$MKLROOT/include -L$MKLROOT/lib/intel64 -lmkl_gf_lp64 -lmkl_gnu_thread -lmkl_core -lpthread -lm\" make make install and # https://software.intel.com/en-us/articles/build-r-301-with-intel-c-compiler-and-intel-mkl-on-linux# export ICC_OPT=\"-mkl -xHOST -fp-model strict\" export CC=\"icc $ICC_OPT\" export CXX=\"icpc $ICC_OPT\" export FC=\"ifort -mkl -xHOST\" export F77=\"ifort -mkl -xHOST\" export FPICFLAGS=\" -fPIC\" export AR=xiar export LD=xild export MKL=\"-lmkl_gf_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread\" ./configure --prefix=/home/jhz22/R-devel --enable-R-shlib --with-x=no --with-blas=-lmkl LDFLAGS=-L/home/jhz22/lib CPPFLAGS=-I/home/jhz22/include For Windows, see https://software.intel.com/content/www/us/en/develop/documentation/get-started-with-mkl-for-windows/top.html. The benchmark is available from here, https://github.com/pachamaltese/r-with-intel-mkl/blob/master/00-benchmark-scripts/1-r-benchmark-25.R. cd \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" rename Rblas.dll Rblas.dll.orig rename Rlapack.dll Rlapack.dll.orig cd \"C:\\Program Files (x86)\\IntelSWTools\\compilers_and_libraries\\windows\\redist\\intel64_win\\mkl\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rblas.dll\" copy mkl_rt.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\\Rlapack.dll\" copy mkl_intel_thread.dll \"C:\\Program Files\\R\\R-4.0.0\\bin\\x64\" making this known the PATH.","title":"MKL"},{"location":"INSTALL/#nlopt","text":"Available from https://nlopt.readthedocs.io/en/latest/ with R counterpart from https://cran.r-project.org/web/packages/nloptr/index.html.","title":"NLopt"},{"location":"INSTALL/#gnu-octave","text":"It is available with, sudo apt install octave","title":"GNU Octave"},{"location":"INSTALL/#pspp","text":"Under Ubuntu, this can be made available with sudo apt install pspp For Fedora, we have sudo dnf install pspp which will install libpq, gsl, gtksourceview3, spread-sheet-widget as well, see https://apps.fedoraproject.org/packages/pspp. Two simple SPSS command files example.sps and plot.sps can be called with pspp example.sps psppire plot.sps showing CLI and GUI, respectively. Related utilities are pspp-convert . It is possible to compile it directly by using gtksourceview 4.0.3 (4.4.0 is more demanding with Python 3.5, meson, Vala, etc.) and use PKG_CONFIG_PATH when appropriate spread-sheet-widget-0.3 fribidi-1.0.8 GTKSOURVIEW_CFLAGS and GTKSOURVIEW_LIBS in the configuration. 
export PREFIX=/rds/user/$USER/hpc-work export GTKSOURCEVIEW_CFLAGS=-I${PREFIX}/includegtksourceview-4 export GTKSOURCEVIEW_LIBS=\"-L${PREFIX}/lib -lgtksourceview-4\" ./configure --prefix=${PREFIX} make make install note that it is necessary to comment on the statement kludge = gtk_source_view_get_type (); from src/ui/gui/widgets.c and to remove the PREFIX= speficiation in the Perl part of compiling, i.e, cd perl-module /usr/bin/perl Makefile.PL PREFIX=/rds/user/$USER/hpc-work OPTIMIZE=\"-g -O2 -I/rds-d4/user/$USER/hpc-work/include/fribidi -I/usr/include/cairo -I/usr/include/glib-2.0 -I/usr/lib64/glib-2.0/include -I/usr/include/pixman-1 -I/usr/include/freetype2 -I/usr/include/libpng15 -I/usr/include/uuid -I/usr/include/libdrm -I/usr/include/pango-1.0 -I/usr/include/harfbuzz \" A more recent description is here, https://cambridge-ceu.github.io/csd3/applications/pspp.html .","title":"PSPP"},{"location":"INSTALL/#python","text":"A useful resource is code from Pattern Recognition and Machine Learning . It is possible to conduct survival analysis with lifelines , pip install lifelines","title":"python"},{"location":"INSTALL/#r","text":"","title":"R"},{"location":"INSTALL/#fedora-31","text":"The R-release, including both the compiled and source package, is built as follows, sudo dnf install R sudo dnf install R-devel while the following are necessary to build the development version , sudo dnf install gcc-c++ sudo dnf install gcc-gfortran sudo dnf install pcre-devel sudo dnf install java-1.8.0-openjdk-devel sudo dnf install readline-devel sudo dnf install libcurl-devel sudo dnf install libX11-devel sudo dnf install libXt-devel sudo dnf install bzip2-devel sudo dnf install xz-devel sudo dnf install pandoc sudo dnf install qpdf sudo dnf install texlive-collection-latex sudo dnf install texlive-collection-fontsextra sudo dnf install texinfo-tex sudo dnf install texlive-collection-fontsrecommended sudo dnf install texlive-collection-latexrecommended ./configure This is necessary since gcc 9 is available and required for CRAN package submission , e.g., # R-release to build R CMD build gap # R-devel to check ln -s $HOME/R/R-devel/bin/R $HOME/bin/R-devel R-devel CMD check --as-cran gap_1.1-22.tar.gz For R-devel, these can be used explicitly, export CC=\"/usr/bin/gcc\" export CXX=\"/usr/bin/g++\" export FC=\"/usr/bin/gfortran\" export CFLAGS=\"-g -O2 -Wall -pedantic -mtune=native\" export FFLAGS=\"-g -O2 -mtune=native -Wall -pedantic\" export CXXFLAGS=\"-g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\" export LDFLAGS=\"-L/usr/lib64\" R-devel CMD INSTALL gap_1.2.tar.gz with check on foreign language calls or R-devel CMD INSTALL --configure-args=\" CC=\\\"/usr/bin/gcc\\\" \\ CXX=\\\"/usr/bin/g++\\\" \\ FC=\\\"/usr/bin/gfortran\\\" \\ CFLAGS=\\\"-g -O2 -Wall -pedantic -mtune=native\\\" \\ FFLAGS=\\\"-g -O2 -mtune=native -Wall -pedantic\\\" \\ CXXFLAGS=\\\"-I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses\\\" \\ LDFLAGS=\\\"-L/usr/lib64\\\" gap_1.1-26.tar.gz ``` which is more restrictive than the default --as-cran above. 
A simpler setup is also possible with `~/.R/Makevars`, e.g., ```bash CC = gcc CXX = g++ CXX11 = g++ FC = gfortran F77 = gfortran F90 = gfortran CFLAGS = -std=c99 -I/usr/include -g -O2 -Wall -pedantic -mtune=native -Wno-ignored-attributes -Wno-deprecated-declarations -Wno-parentheses -Wimplicit-function-declaration CXXFLAGS = -std=c++11 Another example is as follows, module load texlive ./configure --prefix=/rds-d4/user/jhz22/hpc-work \\ --enable-R-shlib \\ CPPFLAGS=-I/rds-d4/user/jhz22/hpc-work/include \\ LDFLAGS=-L/rds-d4/user/jhz22/hpc-work/lib On Fedora 35, we see the following messages from R CMD check gap_1.2.3-6.tar.gz , Error(s) in re-building vignettes: ... --- re-building \u2018gap.Rmd\u2019 using rmarkdown Quitting from lines 273-279 (gap.Rmd) Error: processing vignette 'gap.Rmd' failed with diagnostics: X11 font -adobe-helvetica-%s-%s-*-*-%d-*-*-*-*-*-*-*, face 1 at size 5 could not be loaded --- failed re-building \u2018gap.Rmd\u2019 --- re-building \u2018shinygap.Rmd\u2019 using rmarkdown --- finished re-building \u2018shinygap.Rmd\u2019 --- re-building \u2018jss.Rnw\u2019 using Sweave --- finished re-building \u2018jss.Rnw\u2019 SUMMARY: processing the following file failed: \u2018gap.Rmd\u2019 Error: Vignette re-building failed. Execution halted * checking PDF version of manual ... OK * checking HTML version of manual ... NOTE Skipping checking HTML validation: no command 'tidy' found Skipping checking math rendering: package 'V8' unavailable * checking for non-standard things in the check directory ... OK * checking for detritus in the temp directory ... OK * DONE This is resolved by sudo dnf install v8-devel sudo dnf install xorg-x11-fonts* Rscript -e 'install.packages(c(\"shniy\",\"V8\"),repos=\"https://cran.r-project.org\")'","title":"Fedora 31"},{"location":"INSTALL/#ubuntu-1804","text":"The R environment is furnished with sudo apt install r-base-core sudo apt install r-base-dev and R_LIBS is set from .bashrc export R_LIBS=/usr/local/lib/R/site-library/ Note that in fact html.start() in R points to /usr/local/lib/R/library/ instead, see below example in MendelianRandomization . To enable R-devel/package building, these are necessary sudo apt install g++ sudo apt install gfortran sudo apt install texlive sudo apt install texlive-fonts-extra sudo apt install texinfo sudo apt install texlive-fonts-recommended sudo apt install libreadline-dev To set up bzip2, lzma/pcre, curl and then R assuming lapack is already installed, # compile shared library Makefile-libbz2_so and then add -FPIC to CC and recompile # bzip2 # make # make install PREFIX=$SHOME # xz # ./configure --prefix=SHOME/xz-5.2.3 # make -j3 # make install # pcre # ./configure --prefix=$SHOME --enable-utf8 # curl # ./configure --prefix=$SHOME --with-ssl # make && make install ./configure --prefix=/scratch/jhz22 --enable-R-shlib CPPFLAGS=\"-I/scratch/jhz22/include\" LDFLAGS=\"-L/scratch/jhz22/lib\"","title":"Ubuntu 18.04"},{"location":"INSTALL/#windows","text":"To build packages on Windows, download Rtools from https://cran.r-project.org/ and install to C:\\Rtools rem 22/8/2019 JHZ set path=C:\\Program Files\\R\\R-3.6.1\\bin;c:\\Rtools\\bin;%PATH%;c:\\Rtools\\mingw_64\\bin;c:\\Rtools\\mingw_32\\bin set lib=c:\\Rtools\\mingw_64\\lib;c:\\Rtools\\mingw_32\\include set include=c:\\Rtools\\mingw_64\\include;c:\\Rtools\\mingw_32\\include We can then run R CMD INSTALL --binary gap , say. 
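The binary produced this way (a .zip on Windows; the file name below is illustrative) can then be installed locally without a repository:

```r
# Install a locally built Windows binary package
install.packages("gap_1.2.zip", repos = NULL, type = "win.binary")
```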
It seems the --arch x84 option is very useful for using all available RAM; to make sure use call such as D:\\Program Files\\R\\R-3.5.0\\bin\\x64\\R.exe\" . When this fails, remove large objects in your code and start R with --vanilla option. To upgrade R, it is useful to install installr for its updateR() .","title":"Windows"},{"location":"INSTALL/#package-installation","text":"CRAN . It is typically done with install.packages() install.packages(\"ggplot2\",INSTALL_opts=\"--library=/usr/local/lib/R/site-library/\") Bioconductor . This is done with biocLite . source(\"https://bioconductor.org/biocLite.R\") biocLite(\"packagename\") From R 3.5 or greater there is BiocManager, if (!requireNamespace(\"BiocManager\", quietly = TRUE)) install.packages(\"BiocManager\") BiocManager::install() See https://bioconductor.org/install/. Lastly, it is possible with devtools::install_bioc() . GitHub . We could set this up via sudo apt install r-cran-devtools . This is then through devtools::install_github() . library(devtools) install_github(\"MRCIEU/TwoSampleMR\",args=\"--library=/usr/local/lib/R/site-library\",force=TRUE) with dedicated location(s); however this is not always the case and an alternative is to use sudo R CMD INSTALL -l $R_LIBS to install into $R_LIBS. It is possible to point to a package, locally or remotely, e.g, install.packages(\"http://cnsgenomics.com/software/gsmr/static/gsmr_1.0.6.tar.gz\",repos=NULL,type=\"source\") whose first argument is a URL. Multiple precision arithmetic . This is modified from notes on SCALLOP-INF analysis. sudo apt install libmpfr-dev R --no-save < install.packages(\"plotly\") --- Please select a CRAN mirror for use in this session --- Error in structure(.External(.C_dotTclObjv, objv), class = \"tclObj\") : [tcl] bad pad value \"2m\": must be positive screen distance. but can be avoided with specificatino of repository. > install.packages(\"plotly\", repos=\"https://cran.r-project.org\")","title":"Package installation"},{"location":"INSTALL/#rstudio","text":"The distribution has problem loading or creating R script, so it is tempting to install from https://github.com/rstudio/rstudio/. This involves running scripts under directory dependencies/, ./install-dependencies-debian --exclude-qt-sdk and then the following steps, mkdir build cd build cmake .. -DRSTUDIO_TARGET=Desktop -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local/lib/rstudio However, there is error with Java and Java 8 is required, see https://tecadmin.net/install-oracle-java-8-ubuntu-via-ppa/. sudo add-apt-repository ppa:webupd8team/java sudo apt-get update sudo apt-get install oracle-java8-installer sudo apt-get install oracle-java8-set-default java -version However, compile error is still persistent except when dropping the option --exclude-qt-sdk but unloadable. It is therefore recommended to get around with RStudio daily builds, https://dailies.rstudio.com/.","title":"RStudio"},{"location":"INSTALL/#sagemath","text":"sudo apt install sagemath","title":"SageMath"},{"location":"INSTALL/#stan","text":"cmdstan is now available from https://github.com/stan-dev/cmdstan along with other repositories there. Interfaces are listed at http://mc-stan.org/users/interfaces/index.html. 
Information on installing RStan is described here, https://github.com/stan-dev/rstan/wiki/Installing-RStan-on-Linux On our HPC system under gcc 4.8.5 there are error message > library(rstan) Loading required package: ggplot2 Registered S3 methods overwritten by 'ggplot2': method from [.quosures rlang c.quosures rlang print.quosures rlang Loading required package: StanHeaders Error: package or namespace load failed for \u2018rstan\u2019 in dyn.load(file, DLLpath = DLLpath, ...): unable to load shared object '/rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so': /usr/lib64/libstdc++.so.6: version `GLIBCXX_3.4.20' not found (required by /rds-d4/user/jhz22/hpc-work/R/rstan/libs/rstan.so) > q() which can be resolved with module load gcc/5.2.0 before invoking R. For error message C++14 standard requested but CXX14 is not defined we modify $HOME/.R/Makevars as follows, CXX14 = g++ -std=c++1y -fPIC see https://github.com/stan-dev/rstan/issues/569 but adding -fPIC and as in unixOBD below.","title":"stan"},{"location":"INSTALL/#unixodbc","text":"It is quite standard to install, i.e., wget ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.7.tar.gz tar xvfz unixODBC-2.3.7.tar.gz cd unixODBC-2.3.7 ./configure --prefix=/scratch/jhz22 make make install There have been many discussions regarding \"C++11 standard requested but CXX11 is not defined\" and this could be fixed with changes to $R_HOME/etc/Makeconf such that CXX11 = g++ -std=c++11 -fPIC then module load gcc/5.2.0 R CMD INSTALL odbc This is necessary for gtx for instance.","title":"unixODBC"},{"location":"INSTALL/#zlib","text":"Try sudo apt-get install libz-dev","title":"zlib"},{"location":"LANGUAGES/","text":"Language notes This page collects information on Visual Studio Code, C, C++, Fortran, Java, Perl, python and R. Ada Web: https://www.adaic.org/ From hello.adb , with Ada.Text_IO; procedure Hello is begin Ada.Text_IO.Put_Line (\"Hello, World!\"); end Hello; We run gnatmake hello hello BASIC It is still possible to reflect the old language from here, https://sourceforge.net/projects/devos-studio/ . C There have been multiple instances for suggesting migration from tempnam to mkstemp ( /usr/include/stdlib.h under Bash but not Windows) and mktemp -- the following code illustrates its use under both Bash and Windows, Nevertheless these are not standard routines, one still needs to add char *mktemp(char *) for instance. #include #include /* for open flags */ #include /* for PATH_MAX */ int main(void) { static char template[] = \"/tmp/myfileXXXXXX\"; char fname[PATH_MAX]; static char mesg[] = \"Here's lookin' at you, kid!\\n\"; /* beats \"hello, world\" */ int fd; strcpy(fname, template); mktemp(fname); /* RACE CONDITION WINDOW OPENS */ printf(\"Filename is %s\\n\", fname); /* RACE CONDITION WINDOW LASTS TO HERE */ fd = open(fname, O_CREAT|O_RDWR|O_TRUNC, 0600); write(fd, mesg, strlen(mesg)); close(fd); /* unlink(fname); */ return 0; } A script for testing UTF-8 support by PCRE, #include #include #include int main() { int supports_utf8; if (pcre_config (PCRE_CONFIG_UTF8, &supports_utf8)) { fprintf(stderr, \"pcre_config() failed\\n\"); exit(EXIT_FAILURE); } printf(\"UTF-8 is supported: %s\\n\", supports_utf8 ? 
\"yes\" : \"no\"); exit(EXIT_SUCCESS); } // gcc $(pkg-config --cflags --libs libpcre) pcreutf.c // ./a.out // pcretest -C The following is Timsort implementation, #include #define MIN_RUN 32 // \u63d2\u5165\u6392\u5e8f\u7b97\u6cd5 void insertionSort(int arr[], int left, int right) { for (int i = left + 1; i <= right; i++) { int key = arr[i]; int j = i - 1; while (j >= left && arr[j] > key) { arr[j + 1] = arr[j]; j--; } arr[j + 1] = key; } } // \u5f52\u5e76\u51fd\u6570 void merge(int arr[], int left, int mid, int right) { int len1 = mid - left + 1; int len2 = right - mid; int L[len1], R[len2]; for (int i = 0; i < len1; i++) L[i] = arr[left + i]; for (int j = 0; j < len2; j++) R[j] = arr[mid + 1 + j]; int i = 0, j = 0, k = left; while (i < len1 && j < len2) { if (L[i] <= R[j]) arr[k++] = L[i++]; else arr[k++] = R[j++]; } while (i < len1) arr[k++] = L[i++]; while (j < len2) arr[k++] = R[j++]; } // Timsort \u7b97\u6cd5 void timSort(int arr[], int n) { for (int i = 0; i < n; i += MIN_RUN) insertionSort(arr, i, (i + MIN_RUN - 1) < n ? (i + MIN_RUN - 1) : (n - 1)); for (int size = MIN_RUN; size < n; size *= 2) { for (int left = 0; left < n; left += 2 * size) { int mid = left + size - 1; int right = (left + 2 * size - 1) < (n - 1) ? (left + 2 * size - 1) : (n - 1); merge(arr, left, mid, right); } } } int main() { int arr[] = {12, 11, 13, 5, 6, 7}; int n = sizeof(arr) / sizeof(arr[0]); printf(\"Original array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); timSort(arr, n); printf(\"\\nSorted array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); return 0; } y gcc timsort.c -o timsort and timsort to get Original array: 12 11 13 5 6 7 Sorted array: 5 6 7 11 12 13 14:40 C++ The use of Google Test is noted here, Web: https://github.com/google/googletest . wget -qO- https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz | \\ tar xvfz - cd googletest-release-1.11.0 mkdir build && cd build build .. make # amending set(CMAKE_INSTALL_PREFIX \"/rds/user/jhz22/hpc-work\") in `cmake_install.cmake` make install Now it is possible to compile R/glmnet 4.1-3, i.e., find_package(GTest 1.11 CONFIG REQUIRED) of src/glmnetpp/CMakeLists.txt . Fortran Information on modernising Fortran could be very useful in foreign language calls (e.g., R), http://fortranwiki.org/fortran/show/Modernizing+Old+Fortran. Debugging Fortran code gdb https://undo.io/resources/debugging-fortran-code-gdb/ valgrind program segfault1 implicit none real, dimension(10) :: a integer :: i a = 0. do i = 1, 12 a(i) = i print*,a(i) end do end program segfault1 ! gfortran -g -Wall -Wextra -Wimplicit-interface -fPIC -fmax-errors=1 -fcheck=all -fbacktrace segfault1.f90 -o segfault1 ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 ! MacOS --dsymutil=yes: ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 Java The IDE of choice is NetBeans (e.g., DEPICT and JAM); however 8.1 from apt install under Ubuntu 18.04 crashes so it is suggested to download directly from https://netbeans.org/downloads/. To enable JDK it is helpful to specify --javahome option. sudo ./netbeans-8.2-linux.sh --javahome /usr/lib/jvm/java-8-oracale or start with netbeans --javahome /usr/lib/jvm/java-8-oracle (more convenient to set alias netbeans='netbeans --javahome /usr/lib/jvm/java-8-oracle' at .bashrc ). NetBeans 9.0 is currently available from https://netbeans.apache.org/download/nb90/; the .zip file can be downloaded and unpacked for use. 
For software such as cutadapt cython is required, sudo apt install cython JavaScript A current JavaScript/TypeScript interpreter is deno, https://anaconda.org/conda-forge/deno/files . wget https://anaconda.org/conda-forge/deno/1.40.2/download/linux-64/deno-1.40.2-hfc7925d_0.conda -O deno.conda unzip deno.conda tar --use-compress-program=unzstd -xvf pkg-deno-1.40.2-hfc7925d_0.tar.zst deno --version giving deno 1.40.2 (release, x86_64-unknown-linux-gnu) v8 12.1.285.6 typescript 5.3.3 The mermaid diagram is illustrated with mermaid.html using code available from here, https://cdnjs.cloudflare.com/ajax/libs/mermaid/8.0.0/mermaid.min.js . The call can be embedded in markdown document, The hello world example with plotly.js is https://plot.ly/javascript/getting-started/#hello-world-example and the 3D diagram is with 3d-scatter.html based on https://plot.ly/javascript/3d-scatter-plots/ . The base64 encode/decode is with https://www.base64encode.org/ & https://www.base64decode.org/ . Perl sudo perl -MCPAN -e shell install DBI for instance, as used in VEP . Another notable example is circos, http://circos.ca and its Google group , wget -qO- http://www.circos.ca/distribution/circos-current.tgz | \\ tar xvfz - cd circos-0.69-9 bin/circos --modules wget -qO- http://circos.ca/distribution/circos-tutorials-current.tgz | \\ tar xvfz - wget -qO- http://www.circos.ca/distribution/circos-tools-current.tgz | \\ tar xvfz - The following required modules can be installed Config::General (v2.50 or later) Font::TTF GD List::MoreUtils Math::Bezier Math::Round Math::VecStat Params::Validate Readonly Regexp::Common Set::IntSpan (v1.16 or later) Text::Format and we can enter the example/ directory to run its script. The CircosAPI module requires namespace::autoclean , Moose , JSON::PP and String::Util . Python To disable upgrade of pip, add [global] disable-pip-version-check = True option to $HOME/.config/pip/pip.conf To install a particular version of package, e.g., sudo -H pip install pandas==0.20.1 which is required by DEPICT's munge_sumstats.py . Other pip options include uninstall . The python programs in agotron_detector requires MySQL and can be installed as follows, sudo apt-get install python-dev libmysqlclient-dev sudo pip install MySQL-python It is necessary to use --user option without super-user privilege. PyStan is available with pip install pystan which uses matplotlib, https://github.com/matplotlib and Tkinter, established with sudo apt install python-tk or sudo apt install python3-tk . import pystan schools_code = \"\"\" data { int J; // number of schools real y[J]; // estimated treatment effects real sigma[J]; // s.e. 
of effect estimates } parameters { real mu; real tau; real eta[J]; } transformed parameters { real theta[J]; for (j in 1:J) theta[j] = mu + tau * eta[j]; } model { eta ~ normal(0, 1); y ~ normal(theta, sigma); } \"\"\" schools_dat = {'J': 8, 'y': [28, 8, -3, 7, -1, 1, 18, 12], 'sigma': [15, 10, 16, 11, 9, 11, 10, 18]} sm = pystan.StanModel(model_code=schools_code) fit = sm.sampling(data=schools_dat, iter=1000, chains=4) import matplotlib.pyplot as plt def plotGraph(): fig = fit.plot() # plt.show() # use the save button or the following command, # f.savefig(\"foo.pdf\", bbox_inches='tight') return fig from matplotlib.backends.backend_pdf import PdfPages pp = PdfPages('foo.pdf') f = plotGraph() pp.savefig(f) pp.close() To install jupyter-book, module load python/2.7.10 python -m pip install jupyter-book --user and we can check for $HOME/.local/lib/python2.7/site-packages and start from /home/jhz22/.local/bin. We can install notebook similarly. python -m pip install notebook --user Owing to recent changes, it is more appropriate to use python3 module load python/3.5 export PATH=$PATH:$HOME/.local/bin export PYTHONPATH=/usr/local/Cluster-Apps/python/3.5.1/lib/python3.5/site-packages:$HOME/.local/lib/python3.5/site-packages python3 -m pip install jupyter-book --user To convert from parquet to csv is done as follows, import pandas as pd import pyspark import pyarrow import sys import os fn = sys.argv[1] print(fn) df = pd.read_parquet(fn) outfn = \"\".join(\"GTEx_Analysis_v8_EUR_eQTL_all_associations_csv/\" + os.path.splitext(os.path.basename(fn))[0] + \".csv\") print(outfn) df.to_csv(outfn) R Information on R and RStudio can be seen from installation section of this, https://jinghuazhao.github.io/Computational-Statistics/INSTALL/ . The use of multi-byte string needs specific handling, e.g., # on Bash iconv myfile -f UTF-8 -t ISO-8859-1 -c and # in R Sys.setlocale(\"LC_ALL\", \"C\") See https://stackoverflow.com/questions/4993837/r-invalid-multibyte-string plotly It requires a number of software, sudo dnf install udunits2-devel sudo dnf install cairo-devel sudo dnf install gdal gdal-devel sudo dnf install proj-devel proj-static sudo dnf install geos geos-devel to be followed by install.packages(\"plotly\",depend=TRUE,repos=\"https://cran.r-project.org\") Calls from R Basic examples using OpenMP with R, for C, C++, F77, and Fortran 2003 using Romp, https://github.com/wrathematics/Romp RFI: R to Modern Fortran Interface, https://github.com/t-kalinowski/RFI Stanford Utility Tools for R packages using Fortran, https://bnaras.github.io/SUtools/articles/SUtools.html Package examples for Fortran, * https://cran.r-project.org/web/packages/Delaporte/index.html. * https://cran.r-project.org/web/packages/spam/index.html * https://cran.r-project.org/web/packages/spam64/index.html Documentation * https://www.avrahamadler.com/2018/12/09/the-need-for-speed-part-1-building-an-r-package-with-fortran/ * https://www.avrahamadler.com/2018/12/23/the-need-for-speed-part-2-c-vs-fortran-vs-c/ * https://www.sciencedirect.com/science/article/pii/S2352711018300785?via%3Dihub R packages See https://r-pkgs.org/index.html . shinyapps Web: https://www.shinyapps.io/ , Shiny examples The hello world version is as follows, library(shiny) ui <- fluidPage( \"Hello, world!\" ) server <- function(input, output, session) { } shinyApp(ui, server) Suppose our a directory (called shinyapps here) contains files ui.R and server.R (or combined in app.R ). Go the web site, and register an account with email address. 
Login from https://www.shinyapps.io/admin/#/login and the following information is available: Step 1 \u2013 Install rsconnect The rsconnect package can be installed directly from CRAN. To make sure you have the latest version run following code in your R console: install.packages('rsconnect') Step 2 \u2013 Authorize Account The rsconnect package must be authorized to your account using a token and secret. To do this, click the copy button below and we'll copy the whole command you need to your clipboard. Just paste it into your console to authorize your account. Once you've entered the command successfully in R, that computer is now authorized to deploy applications to your shinyapps.io account. rsconnect::setAccountInfo(name='your-account', token='your token', secret='your secret') In the future, you can manage your tokens from the Tokens page the settings menu. Step 3 \u2013 Deploy Once the rsconnect package has been configured, you're ready to deploy your first application. If you haven't written any applications yet, you can also checkout the Getting Started Guide for instructions on how to deploy our demo application. Run the following code in your R console. library(rsconnect) rsconnect::deployApp('path/to/your/app') The shiny page is then up as https://your-account.shinyapps.io/shinyapps/ . A more sophisticated Dashboard using the Gapminer dataset is copied here from R-bloggers. library(shiny) library(dplyr) library(purrr) library(gapminder) library(highcharter) ui <- fluidPage( tags$head( tags$link(rel = \"stylesheet\", type = \"text/css\", href = \"styles.css\") ), sidebarLayout( sidebarPanel( titlePanel(\"R Shiny Highcharts\"), selectInput( inputId = \"inContinent\", label = \"Continent:\", choices = unique(gapminder$continent), selected = \"Europe\" ), selectInput( inputId = \"inYearMin\", label = \"Start year:\", choices = unique(gapminder$year)[1:length(unique(gapminder$year)) - 1], selected = min(gapminder$year) ), selectInput( inputId = \"inYearMax\", label = \"End year:\", choices = unique(gapminder$year)[2:length(unique(gapminder$year))], selected = max(gapminder$year) ), width = 3 ), mainPanel( tags$h3(\"Latest stats:\"), tags$div( tags$div( tags$p(\"# Countries:\"), textOutput(outputId = \"outNCountries\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median life exp:\"), textOutput(outputId = \"outMedLifeExp\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median population:\"), textOutput(outputId = \"outMedPop\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median GDP:\"), textOutput(outputId = \"outMedGDP\") ) %>% tagAppendAttributes(class = \"stat-card\") ) %>% tagAppendAttributes(class = \"stat-card-container\"), tags$div( tags$h3(\"Summary stats:\"), tags$div( tags$div( highchartOutput(outputId = \"chartLifeExpByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), tags$div( highchartOutput(outputId = \"chartGDPByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), ) %>% tagAppendAttributes(class = \"base-charts-container\") ) %>% tagAppendAttributes(class = \"card-container\"), tags$div( tags$h3(\"Drilldown:\"), tags$div( highchartOutput(outputId = \"chartDrilldown\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card chart-card-full\") ) %>% tagAppendAttributes(class = \"card-container\"), width = 9 ) %>% tagAppendAttributes(class = \"main-container\") ) ) server <- function(input, output) { data_cards <- reactive({ gapminder %>% filter( 
continent == input$inContinent, year == max(year) ) %>% summarise( nCountries = n_distinct(country), medianLifeExp = median(lifeExp), medianPopM = median(pop / 1e6), medianGDP = median(gdpPercap) ) }) data_charts <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_by(year) %>% summarise( medianLifeExp = round(median(lifeExp), 1), medianGDP = round(median(gdpPercap), 2) ) }) drilldown_chart_base_data <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% group_by(country) %>% summarise( pop = round(pop, 1) ) %>% arrange(desc(pop)) }) drilldown_chart_drilldown_data <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_nest(country) %>% mutate( id = country, type = \"column\", data = map(data, mutate, name = year, y = pop), data = map(data, list_parse) ) }) output$outNCountries <- renderText({ data_cards()$nCountries }) output$outMedLifeExp <- renderText({ paste(round(data_cards()$medianLifeExp, 1), \"years\") }) output$outMedPop <- renderText({ paste0(round(data_cards()$medianPopM, 2), \"M\") }) output$outMedGDP <- renderText({ paste0(\"$\", round(data_cards()$medianGDP, 2)) }) output$chartLifeExpByYear <- renderHighchart({ hchart(data_charts(), \"column\", hcaes(x = year, y = medianLifeExp), color = \"#0198f9\", name = \"Median life expectancy\") |> hc_title(text = \"Median life expectancy by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"Life expectancy\")) }) output$chartGDPByYear <- renderHighchart({ hchart(data_charts(), \"line\", hcaes(x = year, y = medianGDP), color = \"#800000\", name = \"Median GDP\") |> hc_title(text = \"Median GDP by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"GDP\")) }) output$chartDrilldown <- renderHighchart({ hchart( drilldown_chart_base_data(), \"column\", hcaes(x = country, y = pop, drilldown = country), name = \"Population\" ) %>% hc_drilldown( allowPointDrilldown = TRUE, series = list_parse(drilldown_chart_drilldown_data()) ) |> hc_colors(c(\"#004c5f\")) |> hc_title(text = \"Population report\", align = \"left\") |> hc_xAxis(title = list(text = \"\")) |> hc_yAxis(title = list(text = \"Population\")) }) } shinyApp(ui = ui, server = server) and the www/styles.css is here, www/styles.css @import url('https:/s.googleapis.com/css2?family=Poppins:ital,wght@0,700;1,400&display=swap'); * { margin: 0; padding: 0; box-sizing: border-box; } body { -family: 'Poppins', sans-serif; -weight: 400; } .main-container { padding-top: 1rem; } .stat-card-container { display: flex; justify-content: space-between; column-gap: 1rem; } .stat-card { border: 2px solid #f2f2f2; border-bottom: 2px solid #0198f9; width: 100%; padding: 0.5rem 0 0.5rem 1rem; } .stat-card > p { text-transform: uppercase; color: #808080; } .stat-card > div.shiny-text-output { -size: 3rem; -weight: 700; } .card-container { padding-top: 2rem; } .base-charts-container { display: flex; justify-content: space-between; column-gap: 1rem; } .chart-card { border: 2px solid #f2f2f2; width: 50%; } .chart-card-full { width: 100%; } TeX/LaTeX It is most convient to convert Tex/LaTex formulas into MicroSoft Word equtions via pandoc, i.e., pandoc README.md -o README.docx . See https://pandoc.org/ and https://pandoc.org/try/ . 
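The same conversion can be driven from R through the rmarkdown package, which calls pandoc under the hood (a sketch, assuming rmarkdown and pandoc are installed):

```r
# Convert a Markdown file to a Word document via pandoc
rmarkdown::render("README.md", output_format = "word_document")
```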
For Chinese language support, try sudo apt-get install texlive-latex-base sudo apt-get install latex-cjk-all sudo apt-get install texlive-latex-extra sudo apt-get install texmaker sudo apt-get install texlive-xetex sudo apt-get install texlive-publishers Now change latex to xelatex from Texmaker. \\documentclass{article} \\usepackage(xeCJK} \\begin{document} How are you?\u4f60\u597d\u5417\uff1f \\LaTeX \\end{document} typescript First, create hello.ts with two lines, #!/usr/bin/env ts-node console.log('Hello world!'); and set up the environment, npm install -g npm npm install typescript ts-node -g chmod +x hello.ts hello.ts Visual Studio Code It is available from https://code.visualstudio.com/download , so we could download a .tar.gz file and unpack. There is a pointer from https://github.com/Microsoft/vscode to https://code.visualstudio.com/Download. Once downloaded, it can be installed with sudo dpkg -i code_1.23.1-1525968403_amd64.deb but it requires libgconf-2-4 ; when failed to install use sudo apt --fix-broken install . See https://code.visualstudio.com/docs/python/python-tutorial for the hello world example. ChatGPT We have export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) , and our first session: import os import openai openai.api_key = os.getenv(\"OPENAI_API_KEY\") if openai.api_key is None: raise ValueError(\"API key not found. Please set the OPENAI_API_KEY environment variable.\") question = input(\"What is your question? \") response = openai.ChatCompletion.create( model=\"gpt-3.5-turbo\", messages=[ {\"role\": \"user\", \"content\": question}, ], max_tokens=512, n=1, stop=None, temperature=0.8, ) print(response) answer = response['choices'][0]['message']['content'] print(\"OpenAI: \" + answer) CodeGPT We need sign up/in for an API key from https://platform.openai.com/overview , e.g., via your MicrsoSoft account. From Visual Studio Code , add extension CodeGPT ; Ctrl-Shit-p to saarch for CodeGPT: Set API Key and enter the key given above from https://platform.openai.com/account/api-keys . Then we create a file, start with a comment, e.g., '# to draw a forest plot' and Ctrl-Shift-i to obtain the code. 
Here is one of the results given, # First, create a dataset with the relevant values for your forest plot study_names <- c(\"Study A\", \"Study B\", \"Study C\", \"Study D\") odds_ratios <- c(1.2, 1.5, 0.8, 0.6) lo_ci <- c(0.9, 1.0, 0.5, 0.3) hi_ci <- c(1.5, 2.0, 1.2, 0.9) # Combine data into a data frame df <- data.frame(study_names, odds_ratios, lo_ci, hi_ci) # Load the 'meta' package for the forest plot function library(meta) # Create the forest plot forest(df$odds_ratios, ci.lb = df$lo_ci, ci.ub = df$hi_ci, slab = df$study_names, xlab = \"Odds Ratio\", main = \"Forest Plot Example\")","title":"LANGUAGES"},{"location":"LANGUAGES/#language-notes","text":"This page collects information on Visual Studio Code, C, C++, Fortran, Java, Perl, python and R.","title":"Language notes"},{"location":"LANGUAGES/#ada","text":"Web: https://www.adaic.org/ From hello.adb , with Ada.Text_IO; procedure Hello is begin Ada.Text_IO.Put_Line (\"Hello, World!\"); end Hello; We run gnatmake hello hello","title":"Ada"},{"location":"LANGUAGES/#basic","text":"It is still possible to reflect the old language from here, https://sourceforge.net/projects/devos-studio/ .","title":"BASIC"},{"location":"LANGUAGES/#c","text":"There have been multiple instances for suggesting migration from tempnam to mkstemp ( /usr/include/stdlib.h under Bash but not Windows) and mktemp -- the following code illustrates its use under both Bash and Windows, Nevertheless these are not standard routines, one still needs to add char *mktemp(char *) for instance. #include #include /* for open flags */ #include /* for PATH_MAX */ int main(void) { static char template[] = \"/tmp/myfileXXXXXX\"; char fname[PATH_MAX]; static char mesg[] = \"Here's lookin' at you, kid!\\n\"; /* beats \"hello, world\" */ int fd; strcpy(fname, template); mktemp(fname); /* RACE CONDITION WINDOW OPENS */ printf(\"Filename is %s\\n\", fname); /* RACE CONDITION WINDOW LASTS TO HERE */ fd = open(fname, O_CREAT|O_RDWR|O_TRUNC, 0600); write(fd, mesg, strlen(mesg)); close(fd); /* unlink(fname); */ return 0; } A script for testing UTF-8 support by PCRE, #include #include #include int main() { int supports_utf8; if (pcre_config (PCRE_CONFIG_UTF8, &supports_utf8)) { fprintf(stderr, \"pcre_config() failed\\n\"); exit(EXIT_FAILURE); } printf(\"UTF-8 is supported: %s\\n\", supports_utf8 ? \"yes\" : \"no\"); exit(EXIT_SUCCESS); } // gcc $(pkg-config --cflags --libs libpcre) pcreutf.c // ./a.out // pcretest -C The following is Timsort implementation, #include #define MIN_RUN 32 // \u63d2\u5165\u6392\u5e8f\u7b97\u6cd5 void insertionSort(int arr[], int left, int right) { for (int i = left + 1; i <= right; i++) { int key = arr[i]; int j = i - 1; while (j >= left && arr[j] > key) { arr[j + 1] = arr[j]; j--; } arr[j + 1] = key; } } // \u5f52\u5e76\u51fd\u6570 void merge(int arr[], int left, int mid, int right) { int len1 = mid - left + 1; int len2 = right - mid; int L[len1], R[len2]; for (int i = 0; i < len1; i++) L[i] = arr[left + i]; for (int j = 0; j < len2; j++) R[j] = arr[mid + 1 + j]; int i = 0, j = 0, k = left; while (i < len1 && j < len2) { if (L[i] <= R[j]) arr[k++] = L[i++]; else arr[k++] = R[j++]; } while (i < len1) arr[k++] = L[i++]; while (j < len2) arr[k++] = R[j++]; } // Timsort \u7b97\u6cd5 void timSort(int arr[], int n) { for (int i = 0; i < n; i += MIN_RUN) insertionSort(arr, i, (i + MIN_RUN - 1) < n ? 
(i + MIN_RUN - 1) : (n - 1)); for (int size = MIN_RUN; size < n; size *= 2) { for (int left = 0; left < n; left += 2 * size) { int mid = left + size - 1; int right = (left + 2 * size - 1) < (n - 1) ? (left + 2 * size - 1) : (n - 1); merge(arr, left, mid, right); } } } int main() { int arr[] = {12, 11, 13, 5, 6, 7}; int n = sizeof(arr) / sizeof(arr[0]); printf(\"Original array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); timSort(arr, n); printf(\"\\nSorted array: \"); for (int i = 0; i < n; i++) printf(\"%d \", arr[i]); return 0; } y gcc timsort.c -o timsort and timsort to get Original array: 12 11 13 5 6 7 Sorted array: 5 6 7 11 12 13 14:40","title":"C"},{"location":"LANGUAGES/#c_1","text":"The use of Google Test is noted here, Web: https://github.com/google/googletest . wget -qO- https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz | \\ tar xvfz - cd googletest-release-1.11.0 mkdir build && cd build build .. make # amending set(CMAKE_INSTALL_PREFIX \"/rds/user/jhz22/hpc-work\") in `cmake_install.cmake` make install Now it is possible to compile R/glmnet 4.1-3, i.e., find_package(GTest 1.11 CONFIG REQUIRED) of src/glmnetpp/CMakeLists.txt .","title":"C++"},{"location":"LANGUAGES/#fortran","text":"Information on modernising Fortran could be very useful in foreign language calls (e.g., R), http://fortranwiki.org/fortran/show/Modernizing+Old+Fortran. Debugging Fortran code gdb https://undo.io/resources/debugging-fortran-code-gdb/ valgrind program segfault1 implicit none real, dimension(10) :: a integer :: i a = 0. do i = 1, 12 a(i) = i print*,a(i) end do end program segfault1 ! gfortran -g -Wall -Wextra -Wimplicit-interface -fPIC -fmax-errors=1 -fcheck=all -fbacktrace segfault1.f90 -o segfault1 ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1 ! MacOS --dsymutil=yes: ! valgrind --leak-check=full --dsymutil=yes --track-origins=yes ./segfault1","title":"Fortran"},{"location":"LANGUAGES/#java","text":"The IDE of choice is NetBeans (e.g., DEPICT and JAM); however 8.1 from apt install under Ubuntu 18.04 crashes so it is suggested to download directly from https://netbeans.org/downloads/. To enable JDK it is helpful to specify --javahome option. sudo ./netbeans-8.2-linux.sh --javahome /usr/lib/jvm/java-8-oracale or start with netbeans --javahome /usr/lib/jvm/java-8-oracle (more convenient to set alias netbeans='netbeans --javahome /usr/lib/jvm/java-8-oracle' at .bashrc ). NetBeans 9.0 is currently available from https://netbeans.apache.org/download/nb90/; the .zip file can be downloaded and unpacked for use. For software such as cutadapt cython is required, sudo apt install cython","title":"Java"},{"location":"LANGUAGES/#javascript","text":"A current JavaScript/TypeScript interpreter is deno, https://anaconda.org/conda-forge/deno/files . wget https://anaconda.org/conda-forge/deno/1.40.2/download/linux-64/deno-1.40.2-hfc7925d_0.conda -O deno.conda unzip deno.conda tar --use-compress-program=unzstd -xvf pkg-deno-1.40.2-hfc7925d_0.tar.zst deno --version giving deno 1.40.2 (release, x86_64-unknown-linux-gnu) v8 12.1.285.6 typescript 5.3.3 The mermaid diagram is illustrated with mermaid.html using code available from here, https://cdnjs.cloudflare.com/ajax/libs/mermaid/8.0.0/mermaid.min.js . The call can be embedded in markdown document, The hello world example with plotly.js is https://plot.ly/javascript/getting-started/#hello-world-example and the 3D diagram is with 3d-scatter.html based on https://plot.ly/javascript/3d-scatter-plots/ . 
The base64 encode/decode is with https://www.base64encode.org/ & https://www.base64decode.org/ .","title":"JavaScript"},{"location":"LANGUAGES/#perl","text":"sudo perl -MCPAN -e shell install DBI for instance, as used in VEP . Another notable example is circos, http://circos.ca and its Google group , wget -qO- http://www.circos.ca/distribution/circos-current.tgz | \\ tar xvfz - cd circos-0.69-9 bin/circos --modules wget -qO- http://circos.ca/distribution/circos-tutorials-current.tgz | \\ tar xvfz - wget -qO- http://www.circos.ca/distribution/circos-tools-current.tgz | \\ tar xvfz - The following required modules can be installed Config::General (v2.50 or later) Font::TTF GD List::MoreUtils Math::Bezier Math::Round Math::VecStat Params::Validate Readonly Regexp::Common Set::IntSpan (v1.16 or later) Text::Format and we can enter the example/ directory to run its script. The CircosAPI module requires namespace::autoclean , Moose , JSON::PP and String::Util .","title":"Perl"},{"location":"LANGUAGES/#python","text":"To disable upgrade of pip, add [global] disable-pip-version-check = True option to $HOME/.config/pip/pip.conf To install a particular version of package, e.g., sudo -H pip install pandas==0.20.1 which is required by DEPICT's munge_sumstats.py . Other pip options include uninstall . The python programs in agotron_detector requires MySQL and can be installed as follows, sudo apt-get install python-dev libmysqlclient-dev sudo pip install MySQL-python It is necessary to use --user option without super-user privilege. PyStan is available with pip install pystan which uses matplotlib, https://github.com/matplotlib and Tkinter, established with sudo apt install python-tk or sudo apt install python3-tk . import pystan schools_code = \"\"\" data { int J; // number of schools real y[J]; // estimated treatment effects real sigma[J]; // s.e. of effect estimates } parameters { real mu; real tau; real eta[J]; } transformed parameters { real theta[J]; for (j in 1:J) theta[j] = mu + tau * eta[j]; } model { eta ~ normal(0, 1); y ~ normal(theta, sigma); } \"\"\" schools_dat = {'J': 8, 'y': [28, 8, -3, 7, -1, 1, 18, 12], 'sigma': [15, 10, 16, 11, 9, 11, 10, 18]} sm = pystan.StanModel(model_code=schools_code) fit = sm.sampling(data=schools_dat, iter=1000, chains=4) import matplotlib.pyplot as plt def plotGraph(): fig = fit.plot() # plt.show() # use the save button or the following command, # f.savefig(\"foo.pdf\", bbox_inches='tight') return fig from matplotlib.backends.backend_pdf import PdfPages pp = PdfPages('foo.pdf') f = plotGraph() pp.savefig(f) pp.close() To install jupyter-book, module load python/2.7.10 python -m pip install jupyter-book --user and we can check for $HOME/.local/lib/python2.7/site-packages and start from /home/jhz22/.local/bin. We can install notebook similarly. 
python -m pip install notebook --user Owing to recent changes, it is more appropriate to use python3 module load python/3.5 export PATH=$PATH:$HOME/.local/bin export PYTHONPATH=/usr/local/Cluster-Apps/python/3.5.1/lib/python3.5/site-packages:$HOME/.local/lib/python3.5/site-packages python3 -m pip install jupyter-book --user To convert from parquet to csv is done as follows, import pandas as pd import pyspark import pyarrow import sys import os fn = sys.argv[1] print(fn) df = pd.read_parquet(fn) outfn = \"\".join(\"GTEx_Analysis_v8_EUR_eQTL_all_associations_csv/\" + os.path.splitext(os.path.basename(fn))[0] + \".csv\") print(outfn) df.to_csv(outfn)","title":"Python"},{"location":"LANGUAGES/#r","text":"Information on R and RStudio can be seen from installation section of this, https://jinghuazhao.github.io/Computational-Statistics/INSTALL/ . The use of multi-byte string needs specific handling, e.g., # on Bash iconv myfile -f UTF-8 -t ISO-8859-1 -c and # in R Sys.setlocale(\"LC_ALL\", \"C\") See https://stackoverflow.com/questions/4993837/r-invalid-multibyte-string plotly It requires a number of software, sudo dnf install udunits2-devel sudo dnf install cairo-devel sudo dnf install gdal gdal-devel sudo dnf install proj-devel proj-static sudo dnf install geos geos-devel to be followed by install.packages(\"plotly\",depend=TRUE,repos=\"https://cran.r-project.org\") Calls from R Basic examples using OpenMP with R, for C, C++, F77, and Fortran 2003 using Romp, https://github.com/wrathematics/Romp RFI: R to Modern Fortran Interface, https://github.com/t-kalinowski/RFI Stanford Utility Tools for R packages using Fortran, https://bnaras.github.io/SUtools/articles/SUtools.html Package examples for Fortran, * https://cran.r-project.org/web/packages/Delaporte/index.html. * https://cran.r-project.org/web/packages/spam/index.html * https://cran.r-project.org/web/packages/spam64/index.html Documentation * https://www.avrahamadler.com/2018/12/09/the-need-for-speed-part-1-building-an-r-package-with-fortran/ * https://www.avrahamadler.com/2018/12/23/the-need-for-speed-part-2-c-vs-fortran-vs-c/ * https://www.sciencedirect.com/science/article/pii/S2352711018300785?via%3Dihub R packages See https://r-pkgs.org/index.html .","title":"R"},{"location":"LANGUAGES/#shinyapps","text":"Web: https://www.shinyapps.io/ , Shiny examples The hello world version is as follows, library(shiny) ui <- fluidPage( \"Hello, world!\" ) server <- function(input, output, session) { } shinyApp(ui, server) Suppose our a directory (called shinyapps here) contains files ui.R and server.R (or combined in app.R ). Go the web site, and register an account with email address. Login from https://www.shinyapps.io/admin/#/login and the following information is available:","title":"shinyapps"},{"location":"LANGUAGES/#step-1-install-rsconnect","text":"The rsconnect package can be installed directly from CRAN. To make sure you have the latest version run following code in your R console: install.packages('rsconnect')","title":"Step 1 \u2013 Install rsconnect"},{"location":"LANGUAGES/#step-2-authorize-account","text":"The rsconnect package must be authorized to your account using a token and secret. To do this, click the copy button below and we'll copy the whole command you need to your clipboard. Just paste it into your console to authorize your account. Once you've entered the command successfully in R, that computer is now authorized to deploy applications to your shinyapps.io account. 
rsconnect::setAccountInfo(name='your-account', token='your token', secret='your secret') In the future, you can manage your tokens from the Tokens page the settings menu.","title":"Step 2 \u2013 Authorize Account"},{"location":"LANGUAGES/#step-3-deploy","text":"Once the rsconnect package has been configured, you're ready to deploy your first application. If you haven't written any applications yet, you can also checkout the Getting Started Guide for instructions on how to deploy our demo application. Run the following code in your R console. library(rsconnect) rsconnect::deployApp('path/to/your/app') The shiny page is then up as https://your-account.shinyapps.io/shinyapps/ . A more sophisticated Dashboard using the Gapminer dataset is copied here from R-bloggers. library(shiny) library(dplyr) library(purrr) library(gapminder) library(highcharter) ui <- fluidPage( tags$head( tags$link(rel = \"stylesheet\", type = \"text/css\", href = \"styles.css\") ), sidebarLayout( sidebarPanel( titlePanel(\"R Shiny Highcharts\"), selectInput( inputId = \"inContinent\", label = \"Continent:\", choices = unique(gapminder$continent), selected = \"Europe\" ), selectInput( inputId = \"inYearMin\", label = \"Start year:\", choices = unique(gapminder$year)[1:length(unique(gapminder$year)) - 1], selected = min(gapminder$year) ), selectInput( inputId = \"inYearMax\", label = \"End year:\", choices = unique(gapminder$year)[2:length(unique(gapminder$year))], selected = max(gapminder$year) ), width = 3 ), mainPanel( tags$h3(\"Latest stats:\"), tags$div( tags$div( tags$p(\"# Countries:\"), textOutput(outputId = \"outNCountries\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median life exp:\"), textOutput(outputId = \"outMedLifeExp\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median population:\"), textOutput(outputId = \"outMedPop\") ) %>% tagAppendAttributes(class = \"stat-card\"), tags$div( tags$p(\"Median GDP:\"), textOutput(outputId = \"outMedGDP\") ) %>% tagAppendAttributes(class = \"stat-card\") ) %>% tagAppendAttributes(class = \"stat-card-container\"), tags$div( tags$h3(\"Summary stats:\"), tags$div( tags$div( highchartOutput(outputId = \"chartLifeExpByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), tags$div( highchartOutput(outputId = \"chartGDPByYear\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card\"), ) %>% tagAppendAttributes(class = \"base-charts-container\") ) %>% tagAppendAttributes(class = \"card-container\"), tags$div( tags$h3(\"Drilldown:\"), tags$div( highchartOutput(outputId = \"chartDrilldown\", height = 500) ) %>% tagAppendAttributes(class = \"chart-card chart-card-full\") ) %>% tagAppendAttributes(class = \"card-container\"), width = 9 ) %>% tagAppendAttributes(class = \"main-container\") ) ) server <- function(input, output) { data_cards <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% summarise( nCountries = n_distinct(country), medianLifeExp = median(lifeExp), medianPopM = median(pop / 1e6), medianGDP = median(gdpPercap) ) }) data_charts <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_by(year) %>% summarise( medianLifeExp = round(median(lifeExp), 1), medianGDP = round(median(gdpPercap), 2) ) }) drilldown_chart_base_data <- reactive({ gapminder %>% filter( continent == input$inContinent, year == max(year) ) %>% group_by(country) %>% summarise( pop = 
round(pop, 1) ) %>% arrange(desc(pop)) }) drilldown_chart_drilldown_data <- reactive({ gapminder %>% filter( continent == input$inContinent, between(year, as.integer(input$inYearMin), as.integer(input$inYearMax)) ) %>% group_nest(country) %>% mutate( id = country, type = \"column\", data = map(data, mutate, name = year, y = pop), data = map(data, list_parse) ) }) output$outNCountries <- renderText({ data_cards()$nCountries }) output$outMedLifeExp <- renderText({ paste(round(data_cards()$medianLifeExp, 1), \"years\") }) output$outMedPop <- renderText({ paste0(round(data_cards()$medianPopM, 2), \"M\") }) output$outMedGDP <- renderText({ paste0(\"$\", round(data_cards()$medianGDP, 2)) }) output$chartLifeExpByYear <- renderHighchart({ hchart(data_charts(), \"column\", hcaes(x = year, y = medianLifeExp), color = \"#0198f9\", name = \"Median life expectancy\") |> hc_title(text = \"Median life expectancy by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"Life expectancy\")) }) output$chartGDPByYear <- renderHighchart({ hchart(data_charts(), \"line\", hcaes(x = year, y = medianGDP), color = \"#800000\", name = \"Median GDP\") |> hc_title(text = \"Median GDP by year\", align = \"left\") |> hc_xAxis(title = list(text = \"Year\")) |> hc_yAxis(title = list(text = \"GDP\")) }) output$chartDrilldown <- renderHighchart({ hchart( drilldown_chart_base_data(), \"column\", hcaes(x = country, y = pop, drilldown = country), name = \"Population\" ) %>% hc_drilldown( allowPointDrilldown = TRUE, series = list_parse(drilldown_chart_drilldown_data()) ) |> hc_colors(c(\"#004c5f\")) |> hc_title(text = \"Population report\", align = \"left\") |> hc_xAxis(title = list(text = \"\")) |> hc_yAxis(title = list(text = \"Population\")) }) } shinyApp(ui = ui, server = server) and the www/styles.css is here, www/styles.css @import url('https:/s.googleapis.com/css2?family=Poppins:ital,wght@0,700;1,400&display=swap'); * { margin: 0; padding: 0; box-sizing: border-box; } body { -family: 'Poppins', sans-serif; -weight: 400; } .main-container { padding-top: 1rem; } .stat-card-container { display: flex; justify-content: space-between; column-gap: 1rem; } .stat-card { border: 2px solid #f2f2f2; border-bottom: 2px solid #0198f9; width: 100%; padding: 0.5rem 0 0.5rem 1rem; } .stat-card > p { text-transform: uppercase; color: #808080; } .stat-card > div.shiny-text-output { -size: 3rem; -weight: 700; } .card-container { padding-top: 2rem; } .base-charts-container { display: flex; justify-content: space-between; column-gap: 1rem; } .chart-card { border: 2px solid #f2f2f2; width: 50%; } .chart-card-full { width: 100%; }","title":"Step 3 \u2013 Deploy"},{"location":"LANGUAGES/#texlatex","text":"It is most convient to convert Tex/LaTex formulas into MicroSoft Word equtions via pandoc, i.e., pandoc README.md -o README.docx . See https://pandoc.org/ and https://pandoc.org/try/ . For Chinese language support, try sudo apt-get install texlive-latex-base sudo apt-get install latex-cjk-all sudo apt-get install texlive-latex-extra sudo apt-get install texmaker sudo apt-get install texlive-xetex sudo apt-get install texlive-publishers Now change latex to xelatex from Texmaker. 
\\documentclass{article} \\usepackage{xeCJK} \\begin{document} How are you?\u4f60\u597d\u5417\uff1f \\LaTeX \\end{document}","title":"TeX/LaTeX"},{"location":"LANGUAGES/#typescript","text":"First, create hello.ts with two lines, #!/usr/bin/env ts-node console.log('Hello world!'); and set up the environment, npm install -g npm npm install typescript ts-node -g chmod +x hello.ts hello.ts","title":"typescript"},{"location":"LANGUAGES/#visual-studio-code","text":"It is available from https://code.visualstudio.com/download , so we could download a .tar.gz file and unpack it. There is a pointer from https://github.com/Microsoft/vscode to https://code.visualstudio.com/Download. Once downloaded, it can be installed with sudo dpkg -i code_1.23.1-1525968403_amd64.deb but it requires libgconf-2-4 ; if it fails to install, use sudo apt --fix-broken install . See https://code.visualstudio.com/docs/python/python-tutorial for the hello world example.","title":"Visual Studio Code"},{"location":"LANGUAGES/#chatgpt","text":"We have export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) , and our first session: import os import openai openai.api_key = os.getenv(\"OPENAI_API_KEY\") if openai.api_key is None: raise ValueError(\"API key not found. Please set the OPENAI_API_KEY environment variable.\") question = input(\"What is your question? \") response = openai.ChatCompletion.create( model=\"gpt-3.5-turbo\", messages=[ {\"role\": \"user\", \"content\": question}, ], max_tokens=512, n=1, stop=None, temperature=0.8, ) print(response) answer = response['choices'][0]['message']['content'] print(\"OpenAI: \" + answer)","title":"ChatGPT"},{"location":"LANGUAGES/#codegpt","text":"We need to sign up/sign in for an API key from https://platform.openai.com/overview , e.g., via your Microsoft account. From Visual Studio Code , add the CodeGPT extension; Ctrl-Shift-p to search for CodeGPT: Set API Key and enter the key given above from https://platform.openai.com/account/api-keys . Then we create a file, start with a comment, e.g., '# to draw a forest plot', and press Ctrl-Shift-i to obtain the code. Here is one of the results given, # First, create a dataset with the relevant values for your forest plot study_names <- c(\"Study A\", \"Study B\", \"Study C\", \"Study D\") odds_ratios <- c(1.2, 1.5, 0.8, 0.6) lo_ci <- c(0.9, 1.0, 0.5, 0.3) hi_ci <- c(1.5, 2.0, 1.2, 0.9) # Combine data into a data frame df <- data.frame(study_names, odds_ratios, lo_ci, hi_ci) # Load the 'meta' package for the forest plot function library(meta) # Create the forest plot forest(df$odds_ratios, ci.lb = df$lo_ci, ci.ub = df$hi_ci, slab = df$study_names, xlab = \"Odds Ratio\", main = \"Forest Plot Example\")","title":"CodeGPT"},{"location":"PARALLEL/","text":"Parallel computing Recent notes are available from https://cambridge-ceu.github.io/csd3/systems/ParallelComputing.html . GNU parallel Its home page is https://www.gnu.org/software/parallel/ -- note especially its --env option to pass environment variables. Under Ubuntu, GNU parallel is easily installed as follows, sudo apt install parallel Earlier versions had issues with the temporary directory, e.g., https://stackoverflow.com/questions/24398941/gnu-parallel-unlink-error with module load parallel/20131222 The latest, http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 , can be used instead. SGE Sun Grid Engine has a wiki entry . https://peteris.rocks/blog/sun-grid-engine-installation-on-ubuntu-server/ .
To delete SGE jobs shown in qstat, use qstat | grep $USER | cut -d\" \" -f1 | xargs qdel Otherwise for a consecutive sequence we use qdel {id1..id2}. SLURM Under Ubuntu, it can be installed with sudo apt install slurm-client General information is available from https://slurm.schedmd.com/ . Job scheduling examples on CentOS 6 and RHEL 7, https://www.arc.ox.ac.uk/arc-systems-0 . command description sacct report job accounting information about active or completed jobs salloc allocate resources for a job in real time (typically used to allocate resources and spawn a shell, in which the srun command is used to launch parallel tasks) sbatch submit a job script for later execution (the script typically contains one or more srun commands to launch parallel tasks) scancel cancel a pending or running job scontrol hold, holdu, release, requeue, requeuehold, suspend and resume commands sinfo reports the state of partitions and nodes managed by Slurm (it has a variety of filtering, sorting, and formatting options) squeue reports the state of jobs (it has a variety of filtering, sorting, and formatting options), by default, reports the running jobs in priority order followed by the pending jobs in priority order srun used to submit a job for execution in real time e.g., squeue -u $USER -r; qstat -u $USER; also scontrol show config; scontrol show partition; scontrol show job [jobid] and sview To see environmental variables, e.g., MaxArraySize, we use scontrol show config | sed -n '/^MaxArraySize/s/.*= *//p' job array, https://slurm.schedmd.com/job_array.html dependency, https://hpc.nih.gov/docs/job_dependencies.html examples, >https://github.com/statgen/SLURM-examples> temporary directories, https://help.rc.ufl.edu/doc/Temporary_Directories When a SLURM job starts, the scheduler creates a temporary directory for the job on the compute node's local hard drive. This $SLURM_TMPDIR directory is very useful for jobs that need to use or generate a large number of small files, as the /ufrc parallel filesystem is optimized for large file streaming and is less suitable for small files. The directory is owned by the user running the job. The path to the temporary directory is made available as the $SLURM_TMPDIR variable. At the end of the job, the temporary directory is automatically removed. You can use the ${SLURM_TMPDIR} variable in job scripts to copy temporary data to the temporary job directory. If necessary, it can also be used as argument for applications that accept a temporary directory argument. Many applications and programming languages use the $TMPDIR environment variable, if available, as the default temporary directory path. If this variable is not set, the applications will default to using the /tmp directory, which is not desirable. SLURM will set $TMPDIR to the same value as $SLURM_TMPDIR unless $TMPDIR has already been set, in which case it will be ignored. Check your job script(s) and shell initialization files like .bashrc and .bash_profile to make sure you do not have $TMPDIR set. If a personal Singularity container is used, make sure that the $SINGULARITYENV_TMPDIR variable is set within the job to export the local scratch location into the Singularity container. Examples of an interactive session can be simply sintr , or srun -N1 -n1 -c6 -p short,medium,long -t 12:0:0 --pty bash -i so that the earliest available partition will be used. SGE to SLURM Conversion is documented at https://srcc.stanford.edu/sge-slurm-conversion . 
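As a minimal sketch of the mapping (a hypothetical array job; queue and partition names differ between sites), an SGE script with directives #!/bin/bash #$ -N pdftopng #$ -t 1-50 #$ -l h_rt=6:0:0 #$ -o work/ and indexed by $SGE_TASK_ID corresponds to a SLURM script with #!/bin/bash #SBATCH --job-name=pdftopng #SBATCH --array=1-50 #SBATCH --time=6:0:0 #SBATCH --output=work/%A_%a.out and indexed by $SLURM_ARRAY_TASK_ID, while qsub, qstat and qdel map to sbatch, squeue and scancel respectively.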
EXAMPLES We intended to convert a large number of PDF files (INTERVAL.*.manhattan.pdf) to PNG with smaller file sizes. To start, we build a file list, ls *pdf | \\ sed 's/INTERVAL.//g;s/.manhattan.pdf//g' > INTERVAL.list We do this with GNU parallel as follows, cat INTERVAL.list | \\ parallel -C' ' ' echo {} pdftopng -r 300 INTERVAL.{}.manhattan.pdf mv {}-000001.png INTERVAL.{}.png ' or with SLURM, #!/bin/bash #SBATCH --ntasks=1 #SBATCH --job-name=pdftopng #SBATCH --time=6:00:00 #SBATCH --cpus-per-task=8 #SBATCH --partition=short #SBATCH --array=1-50 #SBATCH --output=work/pdftopng_%A_%a.out #SBATCH --error=work/pdftopng_%A_%a.err #SBATCH --export ALL . /etc/profile.d/modules.sh module load default-cardio module load slurm module load use.own export p=$(awk 'NR==ENVIRON[\"SLURM_ARRAY_TASK_ID\"]' INTERVAL.list) export TMPDIR=/scratch/jhz22/tmp echo ${p} pdftopng -r 300 INTERVAL.${p}.manhattan.pdf ${p} mv ${p}-000001.png INTERVAL.${p}.png This is a single-parameter case; it is possible to allow for more parameters in both approaches. Note also that the option --array=1-50 instructs the system to schedule the jobs; for jobs with large memory usage it is preferable to change this to --array=1-50%4 so that a maximum of four jobs run simultaneously. Job scheduling echo \"ls -l\" | at 01:00 crontab.guru, https://crontab.guru/examples.html","title":"PARALLEL"},{"location":"PARALLEL/#parallel-computing","text":"Recent notes are available from https://cambridge-ceu.github.io/csd3/systems/ParallelComputing.html .","title":"Parallel computing"},{"location":"PARALLEL/#gnu-parallel","text":"Its home page is https://www.gnu.org/software/parallel/ -- note especially its --env option to pass environment variables. Under Ubuntu, GNU parallel is easily installed as follows, sudo apt install parallel Earlier versions had issues with the temporary directory, e.g., https://stackoverflow.com/questions/24398941/gnu-parallel-unlink-error with module load parallel/20131222 The latest, http://ftp.gnu.org/gnu/parallel/parallel-latest.tar.bz2 , can be used instead.","title":"GNU parallel"},{"location":"PARALLEL/#sge","text":"Sun Grid Engine has a wiki entry . https://peteris.rocks/blog/sun-grid-engine-installation-on-ubuntu-server/ . To delete SGE jobs shown in qstat, use qstat | grep $USER | cut -d\" \" -f1 | xargs qdel Otherwise for a consecutive sequence we use qdel {id1..id2}.","title":"SGE"},{"location":"PARALLEL/#slurm","text":"Under Ubuntu, it can be installed with sudo apt install slurm-client General information is available from https://slurm.schedmd.com/ . Job scheduling examples on CentOS 6 and RHEL 7, https://www.arc.ox.ac.uk/arc-systems-0 .
command description sacct report job accounting information about active or completed jobs salloc allocate resources for a job in real time (typically used to allocate resources and spawn a shell, in which the srun command is used to launch parallel tasks) sbatch submit a job script for later execution (the script typically contains one or more srun commands to launch parallel tasks) scancel cancel a pending or running job scontrol hold, holdu, release, requeue, requeuehold, suspend and resume commands sinfo reports the state of partitions and nodes managed by Slurm (it has a variety of filtering, sorting, and formatting options) squeue reports the state of jobs (it has a variety of filtering, sorting, and formatting options), by default, reports the running jobs in priority order followed by the pending jobs in priority order srun used to submit a job for execution in real time e.g., squeue -u $USER -r; qstat -u $USER; also scontrol show config; scontrol show partition; scontrol show job [jobid] and sview To see environmental variables, e.g., MaxArraySize, we use scontrol show config | sed -n '/^MaxArraySize/s/.*= *//p' job array, https://slurm.schedmd.com/job_array.html dependency, https://hpc.nih.gov/docs/job_dependencies.html examples, >https://github.com/statgen/SLURM-examples> temporary directories, https://help.rc.ufl.edu/doc/Temporary_Directories When a SLURM job starts, the scheduler creates a temporary directory for the job on the compute node's local hard drive. This $SLURM_TMPDIR directory is very useful for jobs that need to use or generate a large number of small files, as the /ufrc parallel filesystem is optimized for large file streaming and is less suitable for small files. The directory is owned by the user running the job. The path to the temporary directory is made available as the $SLURM_TMPDIR variable. At the end of the job, the temporary directory is automatically removed. You can use the ${SLURM_TMPDIR} variable in job scripts to copy temporary data to the temporary job directory. If necessary, it can also be used as argument for applications that accept a temporary directory argument. Many applications and programming languages use the $TMPDIR environment variable, if available, as the default temporary directory path. If this variable is not set, the applications will default to using the /tmp directory, which is not desirable. SLURM will set $TMPDIR to the same value as $SLURM_TMPDIR unless $TMPDIR has already been set, in which case it will be ignored. Check your job script(s) and shell initialization files like .bashrc and .bash_profile to make sure you do not have $TMPDIR set. If a personal Singularity container is used, make sure that the $SINGULARITYENV_TMPDIR variable is set within the job to export the local scratch location into the Singularity container. Examples of an interactive session can be simply sintr , or srun -N1 -n1 -c6 -p short,medium,long -t 12:0:0 --pty bash -i so that the earliest available partition will be used.","title":"SLURM"},{"location":"PARALLEL/#sge-to-slurm","text":"Conversion is documented at https://srcc.stanford.edu/sge-slurm-conversion .","title":"SGE to SLURM"},{"location":"PARALLEL/#examples","text":"We intended to convert a large number of PDF files (INTERVAL.*.manhattn.pdf) to PNG with smaller file sizes. 
To start, we build a file list, ls *pdf | \\ sed 's/INTERVAL.//g;s/.manhattan.pdf//g' > INTERVAL.list We do this with GNU parallel as follows, cat INTERVAL.list | \\ parallel -C' ' ' echo {} pdftopng -r 300 INTERVAL.{}.manhattan.pdf mv {}-000001.png INTERVAL.{}.png ' or with SLURM, #!/bin/bash #SBATCH --ntasks=1 #SBATCH --job-name=pdftopng #SBATCH --time=6:00:00 #SBATCH --cpus-per-task=8 #SBATCH --partition=short #SBATCH --array=1-50 #SBATCH --output=work/pdftopng_%A_%a.out #SBATCH --error=work/pdftopng_%A_%a.err #SBATCH --export ALL . /etc/profile.d/modules.sh module load default-cardio module load slurm module load use.own export p=$(awk 'NR==ENVIRON[\"SLURM_ARRAY_TASK_ID\"]' INTERVAL.list) export TMPDIR=/scratch/jhz22/tmp echo ${p} pdftopng -r 300 INTERVAL.${p}.manhattan.pdf ${p} mv ${p}-000001.png INTERVAL.${p}.png This is a single parameter case and it is possible to allow for more parameters in both cases. Note also that the option --array=1-50 instructs the system to schedule jobs and in jobs with large memory usage it is more preferable to change to --array 1-50%4 so that a maximum of four jobs will be run simultaneously.","title":"EXAMPLES"},{"location":"PARALLEL/#job-scheduling","text":"echo \"ls -l\" | at 01:00 crontab.guru, https://crontab.guru/examples.html","title":"Job scheduling"},{"location":"REPRODUCE/","text":"Reproducible research a.k.a Literate programming. bookdown . CWEB . Jupyter . knitr . noweb and its Tufts site . Pweave and ptangle . Sweave and Stangle . An attempt to model reproducibility is Conceptualizing Reproducibility Using Simulations and Theory ( CRUST ). bookmarks for PDF To install for Ubuntu, sudo snap install pdftk # version 2.02-4, or sudo apt install pdftk-java # version 3.2.2-1 and for Fedora, sudo dnf install pdftk-java . pdftk src.pdf dump_data output bookmarks.txt pdftk target.pdf update_info bookmarks.txt output target-bm.pdf quarto This is extensively documented under Linux, https://cambridge-ceu.github.io/csd3/applications/quarto.html . Under Windows, however it is simpler which involves these steps Install R, e.g., R-4.3.1 Optionally, one installs RStudio or Visual Studio Code (Extension Quarto available, ctrl-+/- to enlarge/shrink fonts). Install python from https://www.python.org/downloads/ rem Program files\\quarto\\bin\\tools deno upgrade py -m pip install tinytex py -m pip install jupyter py -m pip install numpy py -m pip install matplotlib Install quarto, e.g., quato-1.3.450, from https://quarto.org Now we intend to render matplotlib.qmd adapted from https://quarto.org , rendered by quarto render matplotlib.qmd . --- title: \"matplotlib demo\" format: html: code-fold: true jupyter: python3 --- For a demonstration of a line plot on a polar axis, see @fig-polar. ```{python} #| label: fig-polar #| fig-cap: \"A line plot on a polar axis\" import numpy as np import matplotlib.pyplot as plt r = np.arange(0, 2, 0.01) theta = 2 * np.pi * r fig, ax = plt.subplots( subplot_kw = {'projection': 'polar'} ) ax.plot(theta, r) ax.set_rticks([0.5, 1, 1.5, 2]) ax.grid(True) plt.show() ``` Reference Devezer B, Nardin LG, Baumgaertner B, Buzbas EO. Scientific discovery in a model-centric framework: Reproducibility, innovation, and epistemic diversity. PLoS One . 2019 May 15;14(5):e0216125. doi: 10.1371/journal.pone.0216125. eCollection 2019.","title":"REPRODUCE"},{"location":"REPRODUCE/#reproducible-research","text":"a.k.a Literate programming. bookdown . CWEB . Jupyter . knitr . noweb and its Tufts site . Pweave and ptangle . Sweave and Stangle . 
An attempt to model reproducibility is Conceptualizing Reproducibility Using Simulations and Theory ( CRUST ).","title":"Reproducible research"},{"location":"REPRODUCE/#bookmarks-for-pdf","text":"To install for Ubuntu, sudo snap install pdftk # version 2.02-4, or sudo apt install pdftk-java # version 3.2.2-1 and for Fedora, sudo dnf install pdftk-java . pdftk src.pdf dump_data output bookmarks.txt pdftk target.pdf update_info bookmarks.txt output target-bm.pdf","title":"bookmarks for PDF"},{"location":"REPRODUCE/#quarto","text":"This is extensively documented under Linux, https://cambridge-ceu.github.io/csd3/applications/quarto.html . Under Windows, however it is simpler which involves these steps Install R, e.g., R-4.3.1 Optionally, one installs RStudio or Visual Studio Code (Extension Quarto available, ctrl-+/- to enlarge/shrink fonts). Install python from https://www.python.org/downloads/ rem Program files\\quarto\\bin\\tools deno upgrade py -m pip install tinytex py -m pip install jupyter py -m pip install numpy py -m pip install matplotlib Install quarto, e.g., quato-1.3.450, from https://quarto.org Now we intend to render matplotlib.qmd adapted from https://quarto.org , rendered by quarto render matplotlib.qmd . --- title: \"matplotlib demo\" format: html: code-fold: true jupyter: python3 --- For a demonstration of a line plot on a polar axis, see @fig-polar. ```{python} #| label: fig-polar #| fig-cap: \"A line plot on a polar axis\" import numpy as np import matplotlib.pyplot as plt r = np.arange(0, 2, 0.01) theta = 2 * np.pi * r fig, ax = plt.subplots( subplot_kw = {'projection': 'polar'} ) ax.plot(theta, r) ax.set_rticks([0.5, 1, 1.5, 2]) ax.grid(True) plt.show() ```","title":"quarto"},{"location":"REPRODUCE/#reference","text":"Devezer B, Nardin LG, Baumgaertner B, Buzbas EO. Scientific discovery in a model-centric framework: Reproducibility, innovation, and epistemic diversity. PLoS One . 2019 May 15;14(5):e0216125. doi: 10.1371/journal.pone.0216125. eCollection 2019.","title":"Reference"},{"location":"SYSTEMS/","text":"Systems This is a skeleton to list items to be detailed in the near future. FreeDOS and Linux FreeDOS is available from http://www.freedos.org/ . FreeDOS is an open source DOS-compatible operating system that you can use to play classic DOS games, run legacy business software, or develop embedded systems. Any program that works on MS-DOS should also run on FreeDOS. It is notable that v1.3 provides liveCD and liteUSB which could be useful. Here describes how to convert VMDK format to iso, https://www.ilovefreesoftware.com/26/featured/how-to-convert-vmdk-to-iso-in-windows.html , e.g., qemu-img convert -f vmdk FD13LITE.VMDK pd.raw dd if=pd.raw of=pd.iso We can then use rufus, https://rufus.ie/ , to generate a bootable USB allowing for disk partition by fdisk -- in fact rufus itself can produce a bootable USB nevertheless with no utilities. This is useful to install Linux on very old computers, e.g., reorganise hard drive and then install Fedora from a liveUSB generated from Fedora Media Writer, https://getfedora.org/en/workstation/download/ . There are multiple routes to install particular Linux software; one may prefer to install them as standable but it may also come handy use mini-environments such as Anaconda, Miniconda, Linuxbrew or those already in system (e.g. Ubuntu) archive. A rich source of tips are in the-art-of-command-line and awesome-shell . 
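For the mini-environment route, a minimal sketch assuming Miniconda or Anaconda is already on the PATH (the environment name myenv is arbitrary): conda create -n myenv python=3.11 numpy creates an isolated environment, conda activate myenv switches to it and conda env remove -n myenv deletes it again; see the Anaconda section below for channel configuration.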
The following command gives bit information (32 or 64) getconf LONG_BIT The LSB (Linux Standard Base) and distribution information is given with lsb_release -a Under Ubuntu, this could be made available with sudo apt-get install lsb-core . Under Fedora, you may be prompted to install package redhat-lsb-core . Related commands are uname -a and lscpu . The CPU speed can be seen with watch -n.1 \"cat /proc/cpuinfo | grep \\\"^[c]pu MHz\\\"\" The screen utility is operated as follows, screen -S screen -ls screen -r An introduction to Linux Access Control Lists (ACLs), https://www.redhat.com/sysadmin/linux-access-control-lists . To enable color with nano, try find /usr/share/nano/ -iname \"*.nanorc\" -exec echo include {} \\; >> ~/.nanorc Fedora This has already been covered above, i.e., https://getfedora.org/workstation/download/ . Ubuntu Releases are available from http://releases.ubuntu.com and packages are listed at https://packages.ubuntu.com/ . Often it is helpful to run sudo apt update sudo apt upgrade to be in line with the current release; one can check for updates with sudo apt list --upgradable . To resize a virtual disk, follow steps here, https://blog.surges.eu/virtualbox-how-to-solve-the-issue-with-low-disk-space/ . The nautilus desktop can be reset with sudo apt install gnome-tweak-tool gnome-tweaks nautilus-desktop The Unity desktop can be installed with sudo apt-get install unity-session sudo dpkg-reconfigure lightdm Non-root installation may be necessary, e.g., apt download gir1.2-webkit-3.0 apt -i gir1.2-webkit-3.0_2.4.11-3ubuntu3_amd64.deb --force-not-root --root=$HOME Alternatively, we use source package, typically apt-get source package cd package ./configure --prefix=$HOME make make install To unpack a package, one can do dpkg -x package.deb dir When a package URL is available, we can use wget, ar x, xz -d, tar xvf combination to do so. Its archive, http://archive.ubuntu.com/ubuntu/pool/universe , includes beagle, eigensoft, plink, plink-1.9, among others, which can be installed canonically with sudo apt install . sudo apt-get install libcanberra-gtk3-module It is useful to use sudo apt install tasksel , then one can use sudo tasksel . One would get error message such as \"You must put some \u2018source\u2019 URIs in your sources.list\" which can be done as follows sudo apt-get update sudo nano /etc/apt/sources.list # uncomment deb-src here apt-get source hello The system hibernation can be done with sudo systemctl hibernate . Some operations on gnome extensions are given below, sudo apt install gnome-shell-extensions sudo apt search gnome-shell-extension gnome-shell --help sudo apt install gnome-shell-extension-prefs # https://extensions.gnome.org/extension/307/dash-to-dock/ sudo gnome-extensions install dash-to-dockmicxgx.gmail.com.v71.shell-extension.zip sudo apt install gnome-tweaks Installation of dash-to-dock from GitHub proceeds as follows, git clone https://github.com/micheleg/dash-to-dock sudo apt install gettext make -C dash-to-dock install Note that gnome-tweak-tool used earlier on has been updated to gnome-tweaks , which removes gnome shell extensions support from version 40 and it is possible with gnome-shell-extension-prefs . Then the Extension apps enables 'dash-on-dock' as with user themes and Glassy/Glassy-dark through gnome-tweaks by unpacking the relevant files to /usr/share/themes . 
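Individual extensions can also be managed from the command line with the gnome-extensions utility (assuming a reasonably recent GNOME), e.g., gnome-extensions list gnome-extensions info dash-to-dock@micxgx.gmail.com gnome-extensions enable dash-to-dock@micxgx.gmail.com where the UUID matches the directory name under ~/.local/share/gnome-shell/extensions or /usr/share/gnome-shell/extensions.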
For WhiteSur, various operations are given below, # https://www.gnome-look.org/p/1403328/ # https://github.com/vinceliuice/WhiteSur-gtk-theme mkdir ~/.themes mkdir ~/.icons tar fvzx WhiteSur-gtk-theme-2022-02-21.tar.gz cd WhiteSur-gtk-theme-2022-02-21/ ./install.sh -s 220 sudo apt install dbus-x11 ./tweaks.sh -d tar xfz WhiteSur-icon-theme-2022-03-18.tar.gz cd WhiteSur-icon-theme-2022-03-18/ /install.sh cd ~/.icons # https://github.com/vinceliuice/WhiteSur-icon-theme # https://www.gnome-look.org/p/1405756/ tar xf 01-WhiteSur.tar.xz sudo apt install dconf-editor dconf-editor & gsettings set org.gnome.shell.extensions.dash-to-dock extend-height false gsettings set org.gnome.shell.extensions.dash-to-dock dock-position BOTTOM gsettings set org.gnome.shell.extensions.dash-to-dock transparency-mode FIXED gsettings set org.gnome.shell.extensions.dash-to-dock dash-max-icon-size 64 gsettings set org.gnome.shell.extensions.dash-to-dock unity-backlit-items true from dconf-editor, follow navigation \"org > gnome > shell > extensions > dash-to-dock\" to customise, see here . Finally, perhaps more appealing is the gnome extension dash-to-panel . Here is information on file sharing, https://www.c-sharpcorner.com/article/how-to-share-files-between-ubuntu-and-windows-10/ Oracle VirtualBox 7.1.4 This requires Visual Studio C++ 2.19, which can be downloaded, e.g. https://aka.ms/vs/17/release/vc_redist.x64.exe . Windows 7 To use VirtualBox under Windows 7, one needs to enable virtualisation within security section of BIOS setup. To find out system info, one can run systeminfo A useful tip is from https://blog.csdn.net/xz360717118/article/details/67638548 Failed to instantiate CLSID_VirtualBox w/ IVirtualBox, but CLSID_VirtualBox w/ IUnknown works. 2017\u5e7403\u670828\u65e5 16:50:30 \u4e00\u53ea\u732a\u513f\u866b \u9605\u8bfb\u6570 17551 \u6807\u7b7e\uff1a virtualbox win7 \u66f4\u591a \u4e2a\u4eba\u5206\u7c7b\uff1a \u670d\u52a1\u5668 \u7248\u6743\u58f0\u660e\uff1a\u672c\u6587\u4e3a\u535a\u4e3b\u539f\u521b\u6587\u7ae0\uff0c\u672a\u7ecf\u535a\u4e3b\u5141\u8bb8\u4e0d\u5f97\u8f6c\u8f7d\u3002 https://blog.csdn.net/xz360717118/article/details/67638548 \u6211\u662f win7 64\u4f4d \u89e3\u51b3\u529e\u6cd5\uff1a 1\uff0c win+r \u5feb\u6377\u952e\u6253\u5f00 \u201c\u8fd0\u884c\u201d\uff0c\u8f93\u5165regedit \u6253\u5f00\u6ce8\u518c\u8868 2\uff0c\u627e\u5230 HKEY_CLASSES_ROOT\\CLSID\\{00020420-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 3\uff0c\u627e\u5230HKEY_CLASSES_ROOT\\CLSID\\{00020424-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 4\uff0c\u5b8c\u4e8b\u3002\uff08\u6211\u4fee\u6539\u4e86\u5b8c\u4e86 \u4e5f\u6ca1\u8bd5\u7528\uff0c\u76f4\u63a5\u91cd\u542f\u7535\u8111 \u7136\u540e\u6210\u529f\u4e86\uff09 Actually, there is no need to reboot Windows at Step 4. It is possible that installation of Ubuntu could be freezed, in which case a proposal was to proceed with disabling 3D Acceleration and increasing the number of CPUs to 2-4, see https://www.maketecheasier.com/fix-ubuntu-freezing-virtualbox/ . However, our experiment showed that one can enable 3D Acceleration and two CPUs for installation but it is necessary to disable 3D Acceleration and reset CPU to be one for a system with one CPU after installation. The system informatino can be obtained with systeminfo command as described above. 
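The 3D Acceleration and CPU settings can also be changed from the command line while the virtual machine is powered off, e.g. (a sketch; the machine name \"ubuntu18.04\" is illustrative), \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" modifyvm \"ubuntu18.04\" --cpus 1 --accelerate3d off and \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" showvminfo \"ubuntu18.04\" confirms the result.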
Fedora & shared folders The guest additions under Fedora 28 is furnished with sudo dnf update sudo dnf install gcc kernel-devel kernel-headers dkms make bzip2 perl cd /run/media/jhz22/VBox_GAs_5.2.12/ sudo ./VBoxLinuxAdditions.run To set up shared folders and enforce shared clipboard for bidirectional copy between Linux and Windows, # shared folders sudo mount -t vboxsf -o uid=jhz22 C /home/jhz22/C sudo mount -t vboxsf -o uid=jhz22 D /home/jhz22/D # shared clipboard killall VBoxClient sudo VBoxClient-all Another attempt is through VBoxMange, e.g., VBoxManage.exe sharedfolder add \"22.04\" --name U --hostpath \"U:\\\" . For Fedora 31, see https://www.if-not-true-then-false.com/2010/install-virtualbox-guest-additions-on-fedora-centos-red-hat-rhel/ . See https://www.nakivo.com/blog/make-virtualbox-full-screen/ on full-screen size, in particular, \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" setextradata \"32\" VBoxInternal2/EfiGraphicsResolution 1920x1080 for virtual machine 32. Compression Here are the steps, quoting http://www.netreliant.com/news/8/17/Compacting-VirtualBox-Disk-Images-Linux-Guests.html , for compressing large .vdi: # Linux dd if=/dev/zero of=zerofillfile bs=1M rem Windows path D:\\Program Files\\Oracle\\VirtualBox VBoxManage modifyhd --compact \"ubuntu18.04.vdi\" OVA file This is useful, e.g., Windows 11 development environment . Import the OVA file into Oracle VirtualBox: Open Oracle VirtualBox. Select FileImport Appliance. Click Folder to browse to the directory where the OVA file was downloaded. Select the Okta Access Gateway OVA file, and then click Open. Click Next. See https://help.okta.com/oag/en-us/content/topics/access-gateway/deploy-ovb.htm . Windows 11 Official site From the official ISO location, https://aka.ms/DownloadWindows11 , select \"Download Windows 11 Disk Image (ISO) for x64 devices.\". Verify your download after download, https://www.microsoft.com/en-us/software-download/windows11 , from Windows PowerShell, Get-FileHash D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso Algorithm Hash Path --------- ---- ---- SHA256 705AC061688FFD7F5721DA844D01DF85433856EAFAA8441ECE94B270685CA2DB D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso and also Get-FileHash D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso Algorithm Hash Path --------- ---- ---- SHA256 D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso as listed here, https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/?view=powershell-7.4 . 
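To compare a downloaded ISO against the published value directly in PowerShell (a sketch; substitute the hash for your own language and edition), (Get-FileHash D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso -Algorithm SHA256).Hash -eq 'D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309' returns True when the file is intact.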
Hash values for the ISO files for Each Language Country Locale Hash Code Arabic 64-bit 97ED2DF27DEBE5A8E94FB37BE4383EB8516D5C24117793BDA5C1E13568D3F754 Bulgarian 64-bit 1C1BD6E96290521B7E4F814ACA30C2CC4C7FAB1E3076439418673B90954A1FFC Chinese Simplified 64-bit BD1ECBA89BC59B7B62EF12C88C4E70D456EDAC10B969ECB3299EBD458B1F0FB3 Chinese Traditional 64-bit B92C3D6D428D12CF78A8D287B8FB28FFBC44D4A36B74C1B121C3CF443DC67ED2 Croatian 64-bit 95C7008AB1B0BA65212A36FB728AC93B9D7611400A78393FE7B319ED5B74AC5C Czech 64-bit 76120E535DB895D0EA69F47D55B836940821352A010DEDFCBCFC1E22E619FC4B Danish 64-bit D5D34DFFE45BA557581ADA14AC025DB8AA58989D054706061B7D0E5B47F5FDB9 Dutch 64-bit EE9BC545673D8F954A1EDAC691D749438D3E4DFA10A75D2519F79E3708D79FDC English 64-bit B56B911BF18A2CEAEB3904D87E7C770BDF92D3099599D61AC2497B91BF190B11 English International 64-bit D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 Estonian 64-bit D12DC03FA15A7F436A800692E9BA30DBDDAA4CD6122DD71719A2898E953B5407 Finnish 64-bit D6057E058021A9DF8A02B7BB16331D88C38E8BB63D5AD897D094E0DF6C6ADB5A French 64-bit B73AA55DB50D2AD348F61C6537DA05C0D6DED78A143763454E977BE85B444119 French Canadian 64-bit 3CA47351DAD16BD3F7AFA27CDEB321DD726B79859DE8D2769359C7621DE38EC4 German 64-bit 96E036F1219F9EE59F96312CE43EC7DF093E768383A77132750271940926A013 Greek 64-bit 92248F9F5A8735337D4B0DCCE4DD13348F8718858590FD9D1EB00020B5AFB33A Hebrew 64-bit 27D8090B9266A2DAC04E403FE63E46ADE661A5661BF26CA5EBB1A2F13245E86F Hungarian 64-bit 7B58807592AEB2FC5DDF5AF9749FA023CE9165AA4A1BF4F4741709F8AA2EE9D3 Italian 64-bit D95EC65EC06B4036835C7571FE0108159848D2883EF5DA3A67E480130B1F5862 Japanese 64-bit A1E1BCB6C014F39E4A324EC24DB1B745EE62617D29A450BF7B2596A3DCCECD7D Korean 64-bit 63ED86ADFC53F464649478F931EAE39A42DB3FD86C266C9B5AF7F8E19D318C51 Latvian 64-bit F4C2BF7C16576E6D631070D7B7CF6F55E8359D0729B571C570DC6F39D77D9EA6 Lithuanian 64-bit 23B14643B0AD6FDD0231EA201C5E1B000912EE3A0542F1B1F6907DB470AE7D7B Norwegian 64-bit 6CBD6C3FED9CE08AF85420F19D01C287FC58EC0C42DC7409D1D5C341CEB6492B Polish 64-bit 654273603A945EBA3B185FD5D2C22207A0EB788B5E3402F71E6D0839B3026943 Brazilian Portuguese 64-bit 1BC63E9C62FE3EB7E46778F24C790933770FA7430304583BBDF96B47A5D61F1F Portuguese 64-bit FAAFEDFC301A381B0712FC8DB9F0A16ADE2716B998DD4855D0A38172A9A87AB4 Romanian 64-bit 8B23AD43DF35EB75FADDFDFC85D616A001A4D72C757E5286011E3DC9452A5862 Russian 64-bit D0FBDB93864BF6C8ADE844473C9600EBB031C8BB656A272C736E45DFBD9B3BC9 Serbian Latin 64-bit 566047460EEA2F0E0D36E7A378DCFEEA79D7D3C0328227646BE4AB9AC39A9E36 Slovak 64-bit 5B77F2B5F7C77ABF68E628AC37A8841BB1058B7173C1C76DC5A5F6C5BBA855FD Slovenian 64-bit 73F0DC7CC15885F565C76D78D54E4E4D9934720FFE583B52EFDDA2E2457402D7 Spanish 64-bit 708AF7C9AC63B7EB045CA9B196568758B6C1749E8D13CADE61FAACBC7C66D142 Spanish (Mexico) 64-bit BA7A9B9A2052DEF8C24BCB88C76A47B2E6A6C6EE547EED226B9702C5C63AFC69 Swedish 64-bit F3674D377253E2D12635FDEAB76193E80BD80C56A41D10AB9CCEFDB0CDF1AD82 Thai 64-bit CAD5590347376103E369D7E04941B94C037F4CC6C75D81DC50E194FFA87C8CF8 Turkish 64-bit A47E054FE0B762F5B48D08E2B6F768F4B1CA0BC6DB831A76E829F92E578483AF Ukrainian 64-bit 68BABB954E4BFDF8A03ABC188D5120CF12D4DCA3CAE68EB1BFD4B64F872826E2 Some changes as from October 2024, Microsoft Windows [Version 10.0.22631.4249] (c) Microsoft Corporation. All rights reserved. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\CompatMarkers\" /f 2>NUL The operation completed successfully. 
C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\Shared\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\TargetVersionUpgradeExperienceIndicators\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe add \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\HwReqChk\" /f /v HwReqChkVars /t REG_MULTI_SZ /s , /d \"SQ_SecureBootCapable=TRUE,SQ_SecureBootEnabled=TRUE,SQ_TpmVersion=2,SQ_RamMB=8192,\" The operation completed successfully. C:\\Windows\\System32> C:\\Windows\\System32>reg.exe add \"HKLM\\SYSTEM\\Setup\\MoSetup\" /f /v AllowUpgradesWithUnsupportedTPMOrCPU /t REG_DWORD /d 1 The operation completed successfully. C:\\Windows\\System32> See also ZDNET, https://www.zdnet.com/article/how-to-upgrade-your-incompatible-windows-10-pc-to-windows-11/ & Rufus, https://rufus.ie/en/ . It is reported that the changes above have been implemented in Rufus 4.6. Build on the fly git clone https://github.com/AveYo/MediaCreationTool.bat cd MediaCreationTool.bat MediaCreationTool.bat and also https://massgrave.dev/windows_11_links , https://github.com/Raphire/Win11Debloat . Further information CloneVDI is described in this thread, https://forums.virtualbox.org/viewtopic.php?f=6&t=22422 . Moreover, https://www.maketecheasier.com/shrink-your-virtualbox-vm/ and http://bytefreaks.net/windows/reclaim-empty-space-from-virtualbox-vdi-disk-images-by-shrinking . vdi.md as in GWAS-2017 and now listed in GDCT Since one may allocate only part of RAM to VirtualBox, it is often necessary to run program under MS-DOS, e.g., sections on DEPICT. Additional note: 6.1.4 has problem with its Guest Additions. To enable copy/paste through clipboard one can use VBoxGuestAdditions_6.1.97-136310.iso as discused here, https://www.virtualbox.org/ticket/19336. When VirtualBox failed to start a session, one can enter MS-DOS prompt as adminstrator, and issue command bcdedit /set hypervisorlaunchtype off and restart the computer. When there is error message VirtualBox Failed to open session for Virtual Machine , then right click the machine and Discard Saved state . Otherwise, uninstall VirtualBox and reinstall. VirtualBox 7.x.x is considerably easier to set up. SystemRescue See https://www.system-rescue.org/ Windows Hyper-V Web page: https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/quick-start/enable-hyper-v Enable Hyper-V to create virtual machines on Windows 10. Hyper-V can be enabled in many ways including using the Windows 10 control panel, PowerShell or using the Deployment Imaging Servicing and Management tool (DISM). This documents walks through each option: Control Panel --> Programs --> Programs and Features --> Turn Windows Features on or off : Hyper-V , Virtual Machine Platforms . WSL Official page: https://github.com/microsoft/WSL To check version of Windows, issue winver (MS-DOS Prompt: Windows + r, cmd, winver). Installation and setup A description on PowerShell is here, https://learn.microsoft.com/en-us/windows/wsl/install . 
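On recent builds (assumption: Windows 10 version 2004 or later, or Windows 11), a single elevated command wsl --install enables the required features and installs a default Ubuntu distribution, and wsl --install -d Debian (for example) selects a different one; the manual steps below remain useful on older systems.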
To initiate from PowerShell, use dism /online /enable-feature /feature-name:Microsoft-Windows-Subsystem-Linux /all /norestart dism /online /enable-feature /featurename:VirtualMachinePlatform /all /norestart After installation, it can be invoked from a MS-DOS Prompt with wsl -help wsl --list wsl --list --online wsl --list --verbose wsl --distribution Ubuntu wsl --set-default Ubuntu-20.04 The command could also takes additional parameters, e.g., -d debian. One can also create a desktop entry pointing to C:\\Windows\\system32\\wsl.exe. Location of the distros are found by cd %LocalAppData%\\Packages\\ cd CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc For old Windoes build, there is complaint about writing BIOS then an update called wsl_update_x64.msi is required. A comparison of WSL 1 and WSL 2 can be seen from https://aka.ms/wsl2 ( https://learn.microsoft.com/en-us/windows/wsl/compare-versions ). To migrate from WSL 1 to WSL 2, several options are possible, # A distribution only wsl --set-version Ubuntu 2 wsl --set-version Ubuntu-20.04 2 # WSL as a whole wsl --set-default-version 2 and return with error messages, Conversion in progress, this may take a few minutes... For information on key differences with WSL 2 please visit https://aka.ms/wsl2 The requested operation could not be completed due to a virtual disk system limitation. Virtual hard disk files must be uncompressed and unencrypted and must not be sparse. To fix this, as in https://logfetch.com/wsl2-uncompressed/ , open up File Explorer and navigate to: C:\\Users\\YOUR_USER\\AppData\\Local\\Packages\\CanonicalGroupLimited... Right click on LocalState , then Properties , then Advanced . Ensure Compress contents to save disk space and Encrypt contents to secure data are both deselected. Click OK , then Apply , then Apply changes to this folder only Software mobaXterm, https://mobaxterm.mobatek.net/ offers Advanced WSL settings , Graphical environments such as Gnome-desktop/LDXE-desktop/XFCE4-desktop so as to create sessions using graphical desktops. The corresponding installations are ubuntu-gnome-desktop (gnome-session-bin), lubuntu-desktop , xubuntu-desktop (in fact xfce4-session, xubuntu-core, xubuntu-default-settings), respectively. After started, create a session for WSL, open it and install software. sudo apt update sudo apt install nautilus sudo apt install firefox etc. By default C:\\ is /mnt/c. To establish other drives, one can do sudo mkdir /mnt/d sudo mount -t drvfs D: /mnt/d sudo umount /mnt/d The network drive can also be mapped, e.g., sudo mkdir /mnt/u sudo mount -t drvfs '\\\\me-filer1.medschl.cam.ac.uk\\home$\\jhz22' /mnt/u sudo mount -t drvfs U: /mnt/u See https://www.cyberciti.biz/faq/ubuntu-linux-install-gnome-desktop-on-server/ . See also https://www.makeuseof.com/tag/linux-desktop-windows-subsystem/ . ubuntu-wsl is a set of WSL utilities, which could be installed with sudo apt install ubuntu-wsl for executables at /usr/bin , e.g., wslvar PATH for Windows' \\%PATH environment variable and wslsys for basic information, wslusc to create a short cut on Windows desktop. GUI apps This is available from https://learn.microsoft.com/en-us/windows/wsl/tutorials/gui-apps . Example applications include gedit, gimp, nautilus, vlc, x11-apps, microsoft-edge ( https://www.microsoftedgeinsider.com/en-us/download?platform=linux-deb ). In case of problems, try Intel\u00ae Driver & Support Assistant (Intel\u00ae DSA), https://www.intel.co.uk/content/www/uk/en/support/detect.html . 
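A quick smoke test for GUI support (a sketch, assuming WSLg is available) is sudo apt install x11-apps followed by xeyes or xcalc ; a window appearing on the Windows desktop confirms that graphical applications are wired up correctly.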
These files are described https://ubuntu.com/tutorials/install-ubuntu-on-wsl2-on-windows-11-with-gui-support#1-overview showing octave: We start with octave --gui and open scripts julia.m and juliatest.m , select run and then save file and run for figure julia.png , . The flowblade GUI is available with sudo apt-get install flowblade . After installation, one can start with export PYTHONPATH=/usr/share/flowblade/Flowblade:/usr/share/flowblade/Flowblade/vieweditor/:/usr/share/flowblade/Flowblade/tools export SHORTCUTS_PATH=/usr/share/flowblade/Flowblade/res /usr/bin/flowblade or cd /bin;./flowblade , https://github.com/jliljebl/flowblade/issues/857 . xfce4 & rdp xfce4 can also be made available with sudo apt update sudo apt install xorg sudo apt install xfce4 echo xfce4-session > ~/.xsession xfce4-session & or work with xrdp: sudo apt install xrdp sudo cp /etc/xrdp/xrdp.ini /etc/xrdp/xrdp.ini.bak sudo sed -i 's/port=3389/port=3390/' /etc/xrdp/xrdp.ini sudo /etc/init.d/xrdp restart One can add line sudo service xrdp start to ~/.bashrc . Moreover from a start up directory such as %UserProfile%, create a file named .wslconfig with lines such as processors=4 memory=4GB swap=4GB localhostForwarding=true The remote desktop can be started from DOS Prompt mstsc for localhost:3390 (127.0.0.1:3390). Programs such as FireFox can be started. To avoid running the service from every terminal session, amend the service xrdp as follows, export plus=$(service --status-all 2>&1 | grep xrdp | cut -d' ' -f3) if [[ \"$plus\" != \"+\" ]]; then echo $plus sudo service xrdp start fi Earlier note: We use check IPv4 address from Windows as follows with systeminfo (or ipconfig ) and start rdp to :3390. To avoid a dark screen, select Applications --> Settings --> Light Locker Settings --> Automatically lock the screen --> Never , Apply . One may also execute wsl --shutdown to reninitialize. Compression of disk wsl --shutdown diskpart select vdist file=\"D:\\wsl\\Ubuntu-2204\\ext4.vhdx\" compact vdisk echo \"%LocalAppData%\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\\\LocalState\\ext4.vhdx\" select vdisk file=\"C:\\Users\\User\\AppData\\Local\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\LocalState\\ext4.vhdx\" compact vdisk Downloading specific distributions The list is available from https://learn.microsoft.com/en-us/windows/wsl/install-manual but alternatives are also possible. # Ubuntu 21.10 wget https://cloud-images.ubuntu.com/releases/impish/release/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz mkdir d:\\wsl wsl --import Ubuntu-2110 d:/WSL/Ubuntu-2110 d:/Downloads/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz wsl -d Ubuntu-2110 adduser jhz22 usermod -aG sudo jhz22 su jhz22 ln -s /mnt/c C ln -s /mnt/d D ln -s /mnt/f F # restart wsl -d Ubuntu-2110 -u jhz22 # Ubuntu 22.04 # https://cloud-images.ubuntu.com/releases/22.04/release-20220923/ubuntu-22.04-server-cloudimg-arm64-wsl.rootfs.tar.gz Windows applications To start firefox or Chrome, here is a simple way, cmd.exe /c start https://github.com echo cmd.exe /c start https://github.com > ${HOME}/bin/edge chmod +x ${HOME}/bin/edge edge It is possible with default applications under Windows, e.g., cmd.exe /c u:/work/eQTL-MR.pptx which opens up PowerPoint directly. 
One can actually generalize these, e.g., ln -s $HOME/C/Program\\ Files\\ \\(x86\\)/Adobe/Acrobat\\ Reader\\ DC/Reader/AcroRd32.exe /home/$USER/bin/AcroRd32.exe ln -s $HOME/bin/AcroRd32.exe /home/$USER/bin/xpdf followed by a call to AcroRd32.exe and as xpdf , or directly call a list of programs: calc.exe , comp.exe , control.exe , curl.exe , fc.exe , find.exe , finger.exe , mspaint.exe , net.exe , sort.exe , tar.exe , whoami.exe , write.exe , xcopy.exe . Uninstallation This is achieved by # via wsl wsl --unregister Ubuntu-20.04 # via wslconfig wslconfig /u Ubuntu-20.04 Anaconda Once installed, it is customary to make several channels accessible, conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda Package in conda-forge include boost, django, glpk, gnuplot, go, gperf, hdf5, ipython, jquery, julia, jupyter, keras, limix, mercurial, miktex, mysql, nano, numpy, pandas, sage, scikit-learn, zlib. Packages in bioconda includes amos, bcftools, beagle, bedops, bedtools, blast, bowtie, bowtie2, bwa, chromhmm, circos, deeptools, emmix, ensembl-vep, fastlmm, fastqc, gatk, gatk4, hclust2, himmer, himmer2, hisat2, igv, impute2, lofreq, mapsplice, mrbayes, ms, nanostat, paml, pbgzip, phylip, picard, plink, plink2, r-wgcna, rsem, rtg-tools, sambamba, samtools, seqkt, sequana, snpeff, snpsift, sra-tools, star, stringtie, tabix, tophat, ucsc-blat, ucsc-liftover, vcftools. For instance, to install intervaltree as required by depict, the following is sufficience, conda install intervaltree All the packages installed can be seen with conda list . To install java, run following command conda install -c anaconda openjdk Other installations include perl, R. Note that conda under Windows is in typically D:/ProgramData/Anaconda2/Library/bin. Altogether we really need to set path=%path%;D:/ProgramData/Anaconda2;D:/ProgramData/Anaconda2/Library/bin Miniconda is available from https://conda.io/miniconda.html . Installation from scratch, wget https://repo.anaconda.com/archive/Anaconda2-2019.03-Linux-ppc64le.sh sh Anaconda2-2019.03-Linux-ppc64le.sh # do not activate at startup conda config --set auto_activate_base false export PYTHONPATH=/scratch/jhz22/lib/python2.7/site-packages/ Examine .bashrc for changes. See https://docs.anaconda.com/anaconda/user-guide/getting-started/ to get started. GitHub See physalia for information. It has been suggested to display math using the following premium in a GitHub page, A current repository on GitHub is here, https://cambridge-ceu.github.io/GitHub-matters/ . mercurial This is associated with the familiar hg command as used for instance by qctool . It is the executable file for Mercurial source code management system, sudo apt install mercurial libraOffice sudo add-apt-repository ppa:libreoffice/ppa sudo apt-get update sudo apt-get install libreoffice Linuxbrew Follow http://linuxbrew.sh/ and possibly https://docs.brew.sh sudo apt-get install build-essential sh -c \"$(curl -fsSL https://raw.githubusercontent.com/Linuxbrew/install/master/install.sh)\" echo 'export PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"' >>~/.profile echo 'export MANPATH=\"/home/linuxbrew/.linuxbrew/share/man:$MANPATH\"' >>~/.profile echo 'export INFOPATH=\"/home/linuxbrew/.linuxbrew/share/info:$INFOPATH\"' >>~/.profile PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\" AWS http://aws.amazon.com/ The Open Guide to Amazon Web Services . 
E.g., https://sites.google.com/site/jpopgen/wgsa/create-an-aws-account https://sites.google.com/site/jpopgen/wgsa/launch-an-instance https://sites.google.com/site/jpopgen/wgsa/terminate-an-instance modules It is a system that allows you to easily change between different versions of compilers and other software. Here explains how to set up globally, # https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz xz -d modules_5.2.0.orig.tar.xz tar xvf modules_5.2.0.orig.tar cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list Modification can be seen, e.g., function module () { curl -sf -XPOST http://modules-mon.hpc.cam.ac.uk/action -H 'Content-Type: application/json' -d '{ \"username\":\"'$USER'\", \"hostname\":\"'$HOSTNAME'\", \"command\":\"'\"$*\"'\" }' 2>&1 > /dev/null; eval `/usr/bin/modulecmd bash $*` } module load matlab/r2014a matlab $@ Usually the eval line is sufficient. docker See https://www.docker.com/ and https://docs.docker.com/ # https://docs.docker.com/engine/install/ubuntu/ for pkg in docker.io docker-doc docker-compose docker-compose-v2 podman-docker containerd runc; do sudo apt-get remove $pkg; done # Add Docker's official GPG key: sudo apt-get update sudo apt-get install ca-certificates curl sudo install -m 0755 -d /etc/apt/keyrings sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc sudo chmod a+r /etc/apt/keyrings/docker.asc # Add the repository to Apt sources: echo \\ \"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \\ $(. /etc/os-release && echo \"$VERSION_CODENAME\") stable\" | \\ sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin For instance, git clone https://docs.docker.com/engine/install/ubuntu/ docker run --rm -it neoaggelos/knotify /knotify/bin/rna_analysis --sequence AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC giving CCCAAACCGUAACCCC Sequence: AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC Structure: ..............((((((.....[[[))))))....]]]...... Energy: -13.800000190734863 Duration: 0.297946 s By default, Docker is only accessible with root privileges (sudo). As a regular user, add your user to the docker group. 
sudo addgroup --system docker sudo adduser $USER docker newgrp docker sudo snap disable docker sudo snap enable docker For Fedora 33, we have sudo dnf -y install dnf-plugins-core sudo dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo sudo dnf install docker-ce docker-ce-cli containerd.io sudo systemctl start docker sudo docker pull eqtlcatalogue/susie-finemapping:v20.08.1 sudo docker run eqtlcatalogue/susie-finemapping:v20.08.1 # remove docker engine # sudo dnf remove docker-ce docker-ce-cli containerd.io # remove all images, containers, and volumes sudo rm -rf /var/lib/docker OpenVPN See https://github.com/OpenVPN/openvpn-gui . Usage example: sudo openvpn --config myconfig.ovpn Time Zone An example under Fedora, timedatectl list-timezones timedatectl set-timezone Europe/London which is useful to synchronise with files obtained from elsewhere. Web-Linux ssh terminal ShellinaBox sudo apt update sudo apt-get install shellinabox sudo systemctl enable shellinaboxd # restart/start/stop sudo service shellinabox restart sudo systemctl start shellinaboxd sudo systemctl restart shellinaboxd sudo systemctl stop shellinaboxd # alternatives which work sudo /etc/init.d/shellinabox start sudo /etc/init.d/shellinabox stop /etc/init.d/shellinabox status so that firefox https://127.0.0.1:4200 & allows login from Firefox. cpolar Web: https://www.cpolar.com ( https://dashboard.cpolar.com/signup , https://dashboard.cpolar.com/login , https://dashboard.cpolar.com/get-started ) sudo apt install curl curl -L https://www.cpolar.com/static/downloads/install-release-cpolar.sh | sudo bash cpolar sudo systemctl enable cpolar sudo systemctl start cpolar firefox localhost:9200 & Featured articles Don't like your Linux desktop? Here's how to install an alternative, https://www.zdnet.com/article/dont-like-your-linux-desktop-heres-how-to-install-an-alternative/ .","title":"SYSTEMS"},{"location":"SYSTEMS/#systems","text":"This is a skeleton to list items to be detailed in the near future.","title":"Systems"},{"location":"SYSTEMS/#freedos-and-linux","text":"FreeDOS is available from http://www.freedos.org/ . FreeDOS is an open source DOS-compatible operating system that you can use to play classic DOS games, run legacy business software, or develop embedded systems. Any program that works on MS-DOS should also run on FreeDOS. It is notable that v1.3 provides liveCD and liteUSB which could be useful. Here is how to convert VMDK format to iso, https://www.ilovefreesoftware.com/26/featured/how-to-convert-vmdk-to-iso-in-windows.html , e.g., qemu-img convert -f vmdk FD13LITE.VMDK pd.raw dd if=pd.raw of=pd.iso We can then use rufus, https://rufus.ie/ , to generate a bootable USB allowing for disk partition by fdisk -- in fact rufus itself can produce a bootable USB, albeit with no utilities. This is useful for installing Linux on very old computers, e.g., to reorganise the hard drive and then install Fedora from a liveUSB generated from Fedora Media Writer, https://getfedora.org/en/workstation/download/ . There are multiple routes to install particular Linux software; one may prefer to install it standalone, but it may also come in handy to use mini-environments such as Anaconda, Miniconda, Linuxbrew or those already in the system (e.g. Ubuntu) archive. Rich sources of tips are the-art-of-command-line and awesome-shell .
The following command gives bit information (32 or 64) getconf LONG_BIT The LSB (Linux Standard Base) and distribution information is given with lsb_release -a Under Ubuntu, this could be made available with sudo apt-get install lsb-core . Under Fedora, you may be prompted to install package redhat-lsb-core . Related commands are uname -a and lscpu . The CPU speed can be seen with watch -n.1 \"cat /proc/cpuinfo | grep \\\"^[c]pu MHz\\\"\" The screen utility is operated as follows, screen -S screen -ls screen -r An introduction to Linux Access Control Lists (ACLs), https://www.redhat.com/sysadmin/linux-access-control-lists . To enable color with nano, try find /usr/share/nano/ -iname \"*.nanorc\" -exec echo include {} \\; >> ~/.nanorc","title":"FreeDOS and Linux"},{"location":"SYSTEMS/#fedora","text":"This has already been covered above, i.e., https://getfedora.org/workstation/download/ .","title":"Fedora"},{"location":"SYSTEMS/#ubuntu","text":"Releases are available from http://releases.ubuntu.com and packages are listed at https://packages.ubuntu.com/ . Often it is helpful to run sudo apt update sudo apt upgrade to be in line with the current release; one can check for updates with sudo apt list --upgradable . To resize a virtual disk, follow steps here, https://blog.surges.eu/virtualbox-how-to-solve-the-issue-with-low-disk-space/ . The nautilus desktop can be reset with sudo apt install gnome-tweak-tool gnome-tweaks nautilus-desktop The Unity desktop can be installed with sudo apt-get install unity-session sudo dpkg-reconfigure lightdm Non-root installation may be necessary, e.g., apt download gir1.2-webkit-3.0 apt -i gir1.2-webkit-3.0_2.4.11-3ubuntu3_amd64.deb --force-not-root --root=$HOME Alternatively, we use source package, typically apt-get source package cd package ./configure --prefix=$HOME make make install To unpack a package, one can do dpkg -x package.deb dir When a package URL is available, we can use wget, ar x, xz -d, tar xvf combination to do so. Its archive, http://archive.ubuntu.com/ubuntu/pool/universe , includes beagle, eigensoft, plink, plink-1.9, among others, which can be installed canonically with sudo apt install . sudo apt-get install libcanberra-gtk3-module It is useful to use sudo apt install tasksel , then one can use sudo tasksel . One would get error message such as \"You must put some \u2018source\u2019 URIs in your sources.list\" which can be done as follows sudo apt-get update sudo nano /etc/apt/sources.list # uncomment deb-src here apt-get source hello The system hibernation can be done with sudo systemctl hibernate . Some operations on gnome extensions are given below, sudo apt install gnome-shell-extensions sudo apt search gnome-shell-extension gnome-shell --help sudo apt install gnome-shell-extension-prefs # https://extensions.gnome.org/extension/307/dash-to-dock/ sudo gnome-extensions install dash-to-dockmicxgx.gmail.com.v71.shell-extension.zip sudo apt install gnome-tweaks Installation of dash-to-dock from GitHub proceeds as follows, git clone https://github.com/micheleg/dash-to-dock sudo apt install gettext make -C dash-to-dock install Note that gnome-tweak-tool used earlier on has been updated to gnome-tweaks , which removes gnome shell extensions support from version 40 and it is possible with gnome-shell-extension-prefs . Then the Extension apps enables 'dash-on-dock' as with user themes and Glassy/Glassy-dark through gnome-tweaks by unpacking the relevant files to /usr/share/themes . 
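Once unpacked, a theme can also be selected from the command line rather than through gnome-tweaks; the sketch below is only illustrative and assumes a theme named Glassy-dark and that the user-theme extension is enabled,
# apply the GTK theme; the theme name here is only an example
gsettings set org.gnome.desktop.interface gtk-theme "Glassy-dark"
# the shell theme key is provided by the user-theme extension
gsettings set org.gnome.shell.extensions.user-theme name "Glassy-dark"
# revert to the distribution default if the result is unsatisfactory
gsettings reset org.gnome.desktop.interface gtk-theme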
For WhiteSur, various operations are given below, # https://www.gnome-look.org/p/1403328/ # https://github.com/vinceliuice/WhiteSur-gtk-theme mkdir ~/.themes mkdir ~/.icons tar fvzx WhiteSur-gtk-theme-2022-02-21.tar.gz cd WhiteSur-gtk-theme-2022-02-21/ ./install.sh -s 220 sudo apt install dbus-x11 ./tweaks.sh -d tar xfz WhiteSur-icon-theme-2022-03-18.tar.gz cd WhiteSur-icon-theme-2022-03-18/ /install.sh cd ~/.icons # https://github.com/vinceliuice/WhiteSur-icon-theme # https://www.gnome-look.org/p/1405756/ tar xf 01-WhiteSur.tar.xz sudo apt install dconf-editor dconf-editor & gsettings set org.gnome.shell.extensions.dash-to-dock extend-height false gsettings set org.gnome.shell.extensions.dash-to-dock dock-position BOTTOM gsettings set org.gnome.shell.extensions.dash-to-dock transparency-mode FIXED gsettings set org.gnome.shell.extensions.dash-to-dock dash-max-icon-size 64 gsettings set org.gnome.shell.extensions.dash-to-dock unity-backlit-items true from dconf-editor, follow navigation \"org > gnome > shell > extensions > dash-to-dock\" to customise, see here . Finally, perhaps more appealing is the gnome extension dash-to-panel . Here is information on file sharing, https://www.c-sharpcorner.com/article/how-to-share-files-between-ubuntu-and-windows-10/","title":"Ubuntu"},{"location":"SYSTEMS/#oracle-virtualbox","text":"","title":"Oracle VirtualBox"},{"location":"SYSTEMS/#714","text":"This requires Visual Studio C++ 2.19, which can be downloaded, e.g. https://aka.ms/vs/17/release/vc_redist.x64.exe .","title":"7.1.4"},{"location":"SYSTEMS/#windows-7","text":"To use VirtualBox under Windows 7, one needs to enable virtualisation within security section of BIOS setup. To find out system info, one can run systeminfo A useful tip is from https://blog.csdn.net/xz360717118/article/details/67638548 Failed to instantiate CLSID_VirtualBox w/ IVirtualBox, but CLSID_VirtualBox w/ IUnknown works. 2017\u5e7403\u670828\u65e5 16:50:30 \u4e00\u53ea\u732a\u513f\u866b \u9605\u8bfb\u6570 17551 \u6807\u7b7e\uff1a virtualbox win7 \u66f4\u591a \u4e2a\u4eba\u5206\u7c7b\uff1a \u670d\u52a1\u5668 \u7248\u6743\u58f0\u660e\uff1a\u672c\u6587\u4e3a\u535a\u4e3b\u539f\u521b\u6587\u7ae0\uff0c\u672a\u7ecf\u535a\u4e3b\u5141\u8bb8\u4e0d\u5f97\u8f6c\u8f7d\u3002 https://blog.csdn.net/xz360717118/article/details/67638548 \u6211\u662f win7 64\u4f4d \u89e3\u51b3\u529e\u6cd5\uff1a 1\uff0c win+r \u5feb\u6377\u952e\u6253\u5f00 \u201c\u8fd0\u884c\u201d\uff0c\u8f93\u5165regedit \u6253\u5f00\u6ce8\u518c\u8868 2\uff0c\u627e\u5230 HKEY_CLASSES_ROOT\\CLSID\\{00020420-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 3\uff0c\u627e\u5230HKEY_CLASSES_ROOT\\CLSID\\{00020424-0000-0000-C000-000000000046} InprocServer32 \u4fee\u6539 \u7b2c\u4e00\u884c\uff08\u9ed8\u8ba4\uff09\u7684\u503c\u4e3a C:\\Windows\\system32\\oleaut32.dll 4\uff0c\u5b8c\u4e8b\u3002\uff08\u6211\u4fee\u6539\u4e86\u5b8c\u4e86 \u4e5f\u6ca1\u8bd5\u7528\uff0c\u76f4\u63a5\u91cd\u542f\u7535\u8111 \u7136\u540e\u6210\u529f\u4e86\uff09 Actually, there is no need to reboot Windows at Step 4. It is possible that installation of Ubuntu could be freezed, in which case a proposal was to proceed with disabling 3D Acceleration and increasing the number of CPUs to 2-4, see https://www.maketecheasier.com/fix-ubuntu-freezing-virtualbox/ . 
However, our experiment showed that one can enable 3D Acceleration and two CPUs for installation but it is necessary to disable 3D Acceleration and reset CPU to be one for a system with one CPU after installation. The system informatino can be obtained with systeminfo command as described above.","title":"Windows 7"},{"location":"SYSTEMS/#fedora-shared-folders","text":"The guest additions under Fedora 28 is furnished with sudo dnf update sudo dnf install gcc kernel-devel kernel-headers dkms make bzip2 perl cd /run/media/jhz22/VBox_GAs_5.2.12/ sudo ./VBoxLinuxAdditions.run To set up shared folders and enforce shared clipboard for bidirectional copy between Linux and Windows, # shared folders sudo mount -t vboxsf -o uid=jhz22 C /home/jhz22/C sudo mount -t vboxsf -o uid=jhz22 D /home/jhz22/D # shared clipboard killall VBoxClient sudo VBoxClient-all Another attempt is through VBoxMange, e.g., VBoxManage.exe sharedfolder add \"22.04\" --name U --hostpath \"U:\\\" . For Fedora 31, see https://www.if-not-true-then-false.com/2010/install-virtualbox-guest-additions-on-fedora-centos-red-hat-rhel/ . See https://www.nakivo.com/blog/make-virtualbox-full-screen/ on full-screen size, in particular, \"\\Program Files\\Oracle\\VirtualBox\\VBoxManage\" setextradata \"32\" VBoxInternal2/EfiGraphicsResolution 1920x1080 for virtual machine 32.","title":"Fedora & shared folders"},{"location":"SYSTEMS/#compression","text":"Here are the steps, quoting http://www.netreliant.com/news/8/17/Compacting-VirtualBox-Disk-Images-Linux-Guests.html , for compressing large .vdi: # Linux dd if=/dev/zero of=zerofillfile bs=1M rem Windows path D:\\Program Files\\Oracle\\VirtualBox VBoxManage modifyhd --compact \"ubuntu18.04.vdi\"","title":"Compression"},{"location":"SYSTEMS/#ova-file","text":"This is useful, e.g., Windows 11 development environment . Import the OVA file into Oracle VirtualBox: Open Oracle VirtualBox. Select FileImport Appliance. Click Folder to browse to the directory where the OVA file was downloaded. Select the Okta Access Gateway OVA file, and then click Open. Click Next. See https://help.okta.com/oag/en-us/content/topics/access-gateway/deploy-ovb.htm .","title":"OVA file"},{"location":"SYSTEMS/#windows-11","text":"","title":"Windows 11"},{"location":"SYSTEMS/#official-site","text":"From the official ISO location, https://aka.ms/DownloadWindows11 , select \"Download Windows 11 Disk Image (ISO) for x64 devices.\". Verify your download after download, https://www.microsoft.com/en-us/software-download/windows11 , from Windows PowerShell, Get-FileHash D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso Algorithm Hash Path --------- ---- ---- SHA256 705AC061688FFD7F5721DA844D01DF85433856EAFAA8441ECE94B270685CA2DB D:\\Downloads\\Win11_23H2_EnglishInternational_x64v2.iso and also Get-FileHash D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso Algorithm Hash Path --------- ---- ---- SHA256 D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 D:\\Downloads\\Win11_24H2_EnglishInternational_x64.iso as listed here, https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.utility/?view=powershell-7.4 . 
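The same check can be performed under Linux with sha256sum; the sketch below assumes the 24H2 image has been copied to the current directory, with the expected hash taken from the table that follows (converted to lower case),
# compute the checksum of the downloaded image and compare it with the published value
sha256sum Win11_24H2_EnglishInternational_x64.iso
# or let sha256sum do the comparison (two spaces between hash and file name are required)
echo "d5a4c97c3e835c43b1b9a31933327c001766ce314608ba912f2fffc876044309  Win11_24H2_EnglishInternational_x64.iso" | sha256sum -c -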
Hash values for the ISO files for Each Language Country Locale Hash Code Arabic 64-bit 97ED2DF27DEBE5A8E94FB37BE4383EB8516D5C24117793BDA5C1E13568D3F754 Bulgarian 64-bit 1C1BD6E96290521B7E4F814ACA30C2CC4C7FAB1E3076439418673B90954A1FFC Chinese Simplified 64-bit BD1ECBA89BC59B7B62EF12C88C4E70D456EDAC10B969ECB3299EBD458B1F0FB3 Chinese Traditional 64-bit B92C3D6D428D12CF78A8D287B8FB28FFBC44D4A36B74C1B121C3CF443DC67ED2 Croatian 64-bit 95C7008AB1B0BA65212A36FB728AC93B9D7611400A78393FE7B319ED5B74AC5C Czech 64-bit 76120E535DB895D0EA69F47D55B836940821352A010DEDFCBCFC1E22E619FC4B Danish 64-bit D5D34DFFE45BA557581ADA14AC025DB8AA58989D054706061B7D0E5B47F5FDB9 Dutch 64-bit EE9BC545673D8F954A1EDAC691D749438D3E4DFA10A75D2519F79E3708D79FDC English 64-bit B56B911BF18A2CEAEB3904D87E7C770BDF92D3099599D61AC2497B91BF190B11 English International 64-bit D5A4C97C3E835C43B1B9A31933327C001766CE314608BA912F2FFFC876044309 Estonian 64-bit D12DC03FA15A7F436A800692E9BA30DBDDAA4CD6122DD71719A2898E953B5407 Finnish 64-bit D6057E058021A9DF8A02B7BB16331D88C38E8BB63D5AD897D094E0DF6C6ADB5A French 64-bit B73AA55DB50D2AD348F61C6537DA05C0D6DED78A143763454E977BE85B444119 French Canadian 64-bit 3CA47351DAD16BD3F7AFA27CDEB321DD726B79859DE8D2769359C7621DE38EC4 German 64-bit 96E036F1219F9EE59F96312CE43EC7DF093E768383A77132750271940926A013 Greek 64-bit 92248F9F5A8735337D4B0DCCE4DD13348F8718858590FD9D1EB00020B5AFB33A Hebrew 64-bit 27D8090B9266A2DAC04E403FE63E46ADE661A5661BF26CA5EBB1A2F13245E86F Hungarian 64-bit 7B58807592AEB2FC5DDF5AF9749FA023CE9165AA4A1BF4F4741709F8AA2EE9D3 Italian 64-bit D95EC65EC06B4036835C7571FE0108159848D2883EF5DA3A67E480130B1F5862 Japanese 64-bit A1E1BCB6C014F39E4A324EC24DB1B745EE62617D29A450BF7B2596A3DCCECD7D Korean 64-bit 63ED86ADFC53F464649478F931EAE39A42DB3FD86C266C9B5AF7F8E19D318C51 Latvian 64-bit F4C2BF7C16576E6D631070D7B7CF6F55E8359D0729B571C570DC6F39D77D9EA6 Lithuanian 64-bit 23B14643B0AD6FDD0231EA201C5E1B000912EE3A0542F1B1F6907DB470AE7D7B Norwegian 64-bit 6CBD6C3FED9CE08AF85420F19D01C287FC58EC0C42DC7409D1D5C341CEB6492B Polish 64-bit 654273603A945EBA3B185FD5D2C22207A0EB788B5E3402F71E6D0839B3026943 Brazilian Portuguese 64-bit 1BC63E9C62FE3EB7E46778F24C790933770FA7430304583BBDF96B47A5D61F1F Portuguese 64-bit FAAFEDFC301A381B0712FC8DB9F0A16ADE2716B998DD4855D0A38172A9A87AB4 Romanian 64-bit 8B23AD43DF35EB75FADDFDFC85D616A001A4D72C757E5286011E3DC9452A5862 Russian 64-bit D0FBDB93864BF6C8ADE844473C9600EBB031C8BB656A272C736E45DFBD9B3BC9 Serbian Latin 64-bit 566047460EEA2F0E0D36E7A378DCFEEA79D7D3C0328227646BE4AB9AC39A9E36 Slovak 64-bit 5B77F2B5F7C77ABF68E628AC37A8841BB1058B7173C1C76DC5A5F6C5BBA855FD Slovenian 64-bit 73F0DC7CC15885F565C76D78D54E4E4D9934720FFE583B52EFDDA2E2457402D7 Spanish 64-bit 708AF7C9AC63B7EB045CA9B196568758B6C1749E8D13CADE61FAACBC7C66D142 Spanish (Mexico) 64-bit BA7A9B9A2052DEF8C24BCB88C76A47B2E6A6C6EE547EED226B9702C5C63AFC69 Swedish 64-bit F3674D377253E2D12635FDEAB76193E80BD80C56A41D10AB9CCEFDB0CDF1AD82 Thai 64-bit CAD5590347376103E369D7E04941B94C037F4CC6C75D81DC50E194FFA87C8CF8 Turkish 64-bit A47E054FE0B762F5B48D08E2B6F768F4B1CA0BC6DB831A76E829F92E578483AF Ukrainian 64-bit 68BABB954E4BFDF8A03ABC188D5120CF12D4DCA3CAE68EB1BFD4B64F872826E2 Some changes as from October 2024, Microsoft Windows [Version 10.0.22631.4249] (c) Microsoft Corporation. All rights reserved. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\CompatMarkers\" /f 2>NUL The operation completed successfully. 
C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\Shared\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe delete \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\TargetVersionUpgradeExperienceIndicators\" /f 2>NUL The operation completed successfully. C:\\Windows\\System32>reg.exe add \"HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AppCompatFlags\\HwReqChk\" /f /v HwReqChkVars /t REG_MULTI_SZ /s , /d \"SQ_SecureBootCapable=TRUE,SQ_SecureBootEnabled=TRUE,SQ_TpmVersion=2,SQ_RamMB=8192,\" The operation completed successfully. C:\\Windows\\System32> C:\\Windows\\System32>reg.exe add \"HKLM\\SYSTEM\\Setup\\MoSetup\" /f /v AllowUpgradesWithUnsupportedTPMOrCPU /t REG_DWORD /d 1 The operation completed successfully. C:\\Windows\\System32> See also ZDNET, https://www.zdnet.com/article/how-to-upgrade-your-incompatible-windows-10-pc-to-windows-11/ & Rufus, https://rufus.ie/en/ . It is reported that the changes above have been implemented in Rufus 4.6.","title":"Official site"},{"location":"SYSTEMS/#build-on-the-fly","text":"git clone https://github.com/AveYo/MediaCreationTool.bat cd MediaCreationTool.bat MediaCreationTool.bat and also https://massgrave.dev/windows_11_links , https://github.com/Raphire/Win11Debloat .","title":"Build on the fly"},{"location":"SYSTEMS/#further-information","text":"CloneVDI is described in this thread, https://forums.virtualbox.org/viewtopic.php?f=6&t=22422 . Moreover, https://www.maketecheasier.com/shrink-your-virtualbox-vm/ and http://bytefreaks.net/windows/reclaim-empty-space-from-virtualbox-vdi-disk-images-by-shrinking . vdi.md as in GWAS-2017 and now listed in GDCT Since one may allocate only part of RAM to VirtualBox, it is often necessary to run program under MS-DOS, e.g., sections on DEPICT. Additional note: 6.1.4 has problem with its Guest Additions. To enable copy/paste through clipboard one can use VBoxGuestAdditions_6.1.97-136310.iso as discused here, https://www.virtualbox.org/ticket/19336. When VirtualBox failed to start a session, one can enter MS-DOS prompt as adminstrator, and issue command bcdedit /set hypervisorlaunchtype off and restart the computer. When there is error message VirtualBox Failed to open session for Virtual Machine , then right click the machine and Discard Saved state . Otherwise, uninstall VirtualBox and reinstall. VirtualBox 7.x.x is considerably easier to set up.","title":"Further information"},{"location":"SYSTEMS/#systemrescue","text":"See https://www.system-rescue.org/","title":"SystemRescue"},{"location":"SYSTEMS/#windows-hyper-v","text":"Web page: https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/quick-start/enable-hyper-v Enable Hyper-V to create virtual machines on Windows 10. Hyper-V can be enabled in many ways including using the Windows 10 control panel, PowerShell or using the Deployment Imaging Servicing and Management tool (DISM). This documents walks through each option: Control Panel --> Programs --> Programs and Features --> Turn Windows Features on or off : Hyper-V , Virtual Machine Platforms .","title":"Windows Hyper-V"},{"location":"SYSTEMS/#wsl","text":"Official page: https://github.com/microsoft/WSL To check version of Windows, issue winver (MS-DOS Prompt: Windows + r, cmd, winver).","title":"WSL"},{"location":"SYSTEMS/#installation-and-setup","text":"A description on PowerShell is here, https://learn.microsoft.com/en-us/windows/wsl/install . 
To initiate from PowerShell, use dism /online /enable-feature /feature-name:Microsoft-Windows-Subsystem-Linux /all /norestart dism /online /enable-feature /featurename:VirtualMachinePlatform /all /norestart After installation, it can be invoked from a MS-DOS Prompt with wsl -help wsl --list wsl --list --online wsl --list --verbose wsl --distribution Ubuntu wsl --set-default Ubuntu-20.04 The command could also takes additional parameters, e.g., -d debian. One can also create a desktop entry pointing to C:\\Windows\\system32\\wsl.exe. Location of the distros are found by cd %LocalAppData%\\Packages\\ cd CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc For old Windoes build, there is complaint about writing BIOS then an update called wsl_update_x64.msi is required. A comparison of WSL 1 and WSL 2 can be seen from https://aka.ms/wsl2 ( https://learn.microsoft.com/en-us/windows/wsl/compare-versions ). To migrate from WSL 1 to WSL 2, several options are possible, # A distribution only wsl --set-version Ubuntu 2 wsl --set-version Ubuntu-20.04 2 # WSL as a whole wsl --set-default-version 2 and return with error messages, Conversion in progress, this may take a few minutes... For information on key differences with WSL 2 please visit https://aka.ms/wsl2 The requested operation could not be completed due to a virtual disk system limitation. Virtual hard disk files must be uncompressed and unencrypted and must not be sparse. To fix this, as in https://logfetch.com/wsl2-uncompressed/ , open up File Explorer and navigate to: C:\\Users\\YOUR_USER\\AppData\\Local\\Packages\\CanonicalGroupLimited... Right click on LocalState , then Properties , then Advanced . Ensure Compress contents to save disk space and Encrypt contents to secure data are both deselected. Click OK , then Apply , then Apply changes to this folder only Software mobaXterm, https://mobaxterm.mobatek.net/ offers Advanced WSL settings , Graphical environments such as Gnome-desktop/LDXE-desktop/XFCE4-desktop so as to create sessions using graphical desktops. The corresponding installations are ubuntu-gnome-desktop (gnome-session-bin), lubuntu-desktop , xubuntu-desktop (in fact xfce4-session, xubuntu-core, xubuntu-default-settings), respectively. After started, create a session for WSL, open it and install software. sudo apt update sudo apt install nautilus sudo apt install firefox etc. By default C:\\ is /mnt/c. To establish other drives, one can do sudo mkdir /mnt/d sudo mount -t drvfs D: /mnt/d sudo umount /mnt/d The network drive can also be mapped, e.g., sudo mkdir /mnt/u sudo mount -t drvfs '\\\\me-filer1.medschl.cam.ac.uk\\home$\\jhz22' /mnt/u sudo mount -t drvfs U: /mnt/u See https://www.cyberciti.biz/faq/ubuntu-linux-install-gnome-desktop-on-server/ . See also https://www.makeuseof.com/tag/linux-desktop-windows-subsystem/ . ubuntu-wsl is a set of WSL utilities, which could be installed with sudo apt install ubuntu-wsl for executables at /usr/bin , e.g., wslvar PATH for Windows' \\%PATH environment variable and wslsys for basic information, wslusc to create a short cut on Windows desktop.","title":"Installation and setup"},{"location":"SYSTEMS/#gui-apps","text":"This is available from https://learn.microsoft.com/en-us/windows/wsl/tutorials/gui-apps . Example applications include gedit, gimp, nautilus, vlc, x11-apps, microsoft-edge ( https://www.microsoftedgeinsider.com/en-us/download?platform=linux-deb ). 
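A quick way to confirm that GUI support is working is to install one of the listed packages and launch it from the WSL shell; a minimal sketch (xeyes is just a convenient test application),
# install a small set of X11 demo applications inside the distribution
sudo apt update
sudo apt install -y x11-apps
# xeyes should open a window on the Windows desktop if GUI support is working
xeyes &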
In case of problems, try Intel\u00ae Driver & Support Assistant (Intel\u00ae DSA), https://www.intel.co.uk/content/www/uk/en/support/detect.html . These files are described https://ubuntu.com/tutorials/install-ubuntu-on-wsl2-on-windows-11-with-gui-support#1-overview showing octave: We start with octave --gui and open scripts julia.m and juliatest.m , select run and then save file and run for figure julia.png , . The flowblade GUI is available with sudo apt-get install flowblade . After installation, one can start with export PYTHONPATH=/usr/share/flowblade/Flowblade:/usr/share/flowblade/Flowblade/vieweditor/:/usr/share/flowblade/Flowblade/tools export SHORTCUTS_PATH=/usr/share/flowblade/Flowblade/res /usr/bin/flowblade or cd /bin;./flowblade , https://github.com/jliljebl/flowblade/issues/857 .","title":"GUI apps"},{"location":"SYSTEMS/#xfce4-rdp","text":"xfce4 can also be made available with sudo apt update sudo apt install xorg sudo apt install xfce4 echo xfce4-session > ~/.xsession xfce4-session & or work with xrdp: sudo apt install xrdp sudo cp /etc/xrdp/xrdp.ini /etc/xrdp/xrdp.ini.bak sudo sed -i 's/port=3389/port=3390/' /etc/xrdp/xrdp.ini sudo /etc/init.d/xrdp restart One can add line sudo service xrdp start to ~/.bashrc . Moreover from a start up directory such as %UserProfile%, create a file named .wslconfig with lines such as processors=4 memory=4GB swap=4GB localhostForwarding=true The remote desktop can be started from DOS Prompt mstsc for localhost:3390 (127.0.0.1:3390). Programs such as FireFox can be started. To avoid running the service from every terminal session, amend the service xrdp as follows, export plus=$(service --status-all 2>&1 | grep xrdp | cut -d' ' -f3) if [[ \"$plus\" != \"+\" ]]; then echo $plus sudo service xrdp start fi Earlier note: We use check IPv4 address from Windows as follows with systeminfo (or ipconfig ) and start rdp to :3390. To avoid a dark screen, select Applications --> Settings --> Light Locker Settings --> Automatically lock the screen --> Never , Apply . One may also execute wsl --shutdown to reninitialize.","title":"xfce4 & rdp"},{"location":"SYSTEMS/#compression-of-disk","text":"wsl --shutdown diskpart select vdist file=\"D:\\wsl\\Ubuntu-2204\\ext4.vhdx\" compact vdisk echo \"%LocalAppData%\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\\\LocalState\\ext4.vhdx\" select vdisk file=\"C:\\Users\\User\\AppData\\Local\\Packages\\CanonicalGroupLimited.UbuntuonWindows_79rhkp1fndgsc\\LocalState\\ext4.vhdx\" compact vdisk","title":"Compression of disk"},{"location":"SYSTEMS/#downloading-specific-distributions","text":"The list is available from https://learn.microsoft.com/en-us/windows/wsl/install-manual but alternatives are also possible. 
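Before importing a downloaded rootfs image, as in the commands that follow, it can be checked against the checksums published alongside it; this is only a sketch and assumes the release directory provides a SHA256SUMS file,
# fetch the checksum list published next to the rootfs tarball (path is an assumption)
wget https://cloud-images.ubuntu.com/releases/impish/release/SHA256SUMS
# check only the file that is present locally; other entries are skipped
sha256sum -c SHA256SUMS --ignore-missing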
# Ubuntu 21.10 wget https://cloud-images.ubuntu.com/releases/impish/release/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz mkdir d:\\wsl wsl --import Ubuntu-2110 d:/WSL/Ubuntu-2110 d:/Downloads/ubuntu-21.10-server-cloudimg-amd64-wsl.rootfs.tar.gz wsl -d Ubuntu-2110 adduser jhz22 usermod -aG sudo jhz22 su jhz22 ln -s /mnt/c C ln -s /mnt/d D ln -s /mnt/f F # restart wsl -d Ubuntu-2110 -u jhz22 # Ubuntu 22.04 # https://cloud-images.ubuntu.com/releases/22.04/release-20220923/ubuntu-22.04-server-cloudimg-arm64-wsl.rootfs.tar.gz","title":"Downloading specific distributions"},{"location":"SYSTEMS/#windows-applications","text":"To start firefox or Chrome, here is a simple way, cmd.exe /c start https://github.com echo cmd.exe /c start https://github.com > ${HOME}/bin/edge chmod +x ${HOME}/bin/edge edge It is possible with default applications under Windows, e.g., cmd.exe /c u:/work/eQTL-MR.pptx which opens up PowerPoint directly. One can actually generalize these, e.g., ln -s $HOME/C/Program\\ Files\\ \\(x86\\)/Adobe/Acrobat\\ Reader\\ DC/Reader/AcroRd32.exe /home/$USER/bin/AcroRd32.exe ln -s $HOME/bin/AcroRd32.exe /home/$USER/bin/xpdf followed by a call to AcroRd32.exe and as xpdf , or directly call a list of programs: calc.exe , comp.exe , control.exe , curl.exe , fc.exe , find.exe , finger.exe , mspaint.exe , net.exe , sort.exe , tar.exe , whoami.exe , write.exe , xcopy.exe .","title":"Windows applications"},{"location":"SYSTEMS/#uninstallation","text":"This is achieved by # via wsl wsl --unregister Ubuntu-20.04 # via wslconfig wslconfig /u Ubuntu-20.04","title":"Uninstallation"},{"location":"SYSTEMS/#anaconda","text":"Once installed, it is customary to make several channels accessible, conda config --add channels defaults conda config --add channels conda-forge conda config --add channels bioconda Package in conda-forge include boost, django, glpk, gnuplot, go, gperf, hdf5, ipython, jquery, julia, jupyter, keras, limix, mercurial, miktex, mysql, nano, numpy, pandas, sage, scikit-learn, zlib. Packages in bioconda includes amos, bcftools, beagle, bedops, bedtools, blast, bowtie, bowtie2, bwa, chromhmm, circos, deeptools, emmix, ensembl-vep, fastlmm, fastqc, gatk, gatk4, hclust2, himmer, himmer2, hisat2, igv, impute2, lofreq, mapsplice, mrbayes, ms, nanostat, paml, pbgzip, phylip, picard, plink, plink2, r-wgcna, rsem, rtg-tools, sambamba, samtools, seqkt, sequana, snpeff, snpsift, sra-tools, star, stringtie, tabix, tophat, ucsc-blat, ucsc-liftover, vcftools. For instance, to install intervaltree as required by depict, the following is sufficience, conda install intervaltree All the packages installed can be seen with conda list . To install java, run following command conda install -c anaconda openjdk Other installations include perl, R. Note that conda under Windows is in typically D:/ProgramData/Anaconda2/Library/bin. Altogether we really need to set path=%path%;D:/ProgramData/Anaconda2;D:/ProgramData/Anaconda2/Library/bin Miniconda is available from https://conda.io/miniconda.html . Installation from scratch, wget https://repo.anaconda.com/archive/Anaconda2-2019.03-Linux-ppc64le.sh sh Anaconda2-2019.03-Linux-ppc64le.sh # do not activate at startup conda config --set auto_activate_base false export PYTHONPATH=/scratch/jhz22/lib/python2.7/site-packages/ Examine .bashrc for changes. See https://docs.anaconda.com/anaconda/user-guide/getting-started/ to get started.","title":"Anaconda"},{"location":"SYSTEMS/#github","text":"See physalia for information. 
It has been suggested to display math using the following premium in a GitHub page, A current repository on GitHub is here, https://cambridge-ceu.github.io/GitHub-matters/ .","title":"GitHub"},{"location":"SYSTEMS/#mercurial","text":"This is associated with the familiar hg command as used for instance by qctool . It is the executable file for Mercurial source code management system, sudo apt install mercurial","title":"mercurial"},{"location":"SYSTEMS/#libraoffice","text":"sudo add-apt-repository ppa:libreoffice/ppa sudo apt-get update sudo apt-get install libreoffice","title":"libraOffice"},{"location":"SYSTEMS/#linuxbrew","text":"Follow http://linuxbrew.sh/ and possibly https://docs.brew.sh sudo apt-get install build-essential sh -c \"$(curl -fsSL https://raw.githubusercontent.com/Linuxbrew/install/master/install.sh)\" echo 'export PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"' >>~/.profile echo 'export MANPATH=\"/home/linuxbrew/.linuxbrew/share/man:$MANPATH\"' >>~/.profile echo 'export INFOPATH=\"/home/linuxbrew/.linuxbrew/share/info:$INFOPATH\"' >>~/.profile PATH=\"/home/linuxbrew/.linuxbrew/bin:$PATH\"","title":"Linuxbrew"},{"location":"SYSTEMS/#aws","text":"http://aws.amazon.com/ The Open Guide to Amazon Web Services . E.g., https://sites.google.com/site/jpopgen/wgsa/create-an-aws-account https://sites.google.com/site/jpopgen/wgsa/launch-an-instance https://sites.google.com/site/jpopgen/wgsa/terminate-an-instance","title":"AWS"},{"location":"SYSTEMS/#modules","text":"It is a system that allows you to easily change between different versions of compilers and other software. Here explains how to set up globally, # https://www.microbialsystems.cn/en/post/xubuntu_env_modules/ wget http://archive.ubuntu.com/ubuntu/pool/universe/m/modules/modules_5.2.0.orig.tar.xz xz -d modules_5.2.0.orig.tar.xz tar xvf modules_5.2.0.orig.tar cd modules-5.2.0 sudo apt-get install tcl-dev tk-dev ./configure make sudo make install ls /usr/local/Modules source /usr/local/Modules/init/bash sudo ln -s /usr/local/Modules/init/profile.sh /etc/profile.d/modules.sh sudo ln -s /usr/local/Modules/init/profile.csh /etc/profile.d/modules.csh echo -e \"\\n# For initiating Modules\" | sudo tee -a /etc/bash.bashrc > /dev/null # Append a line to the end of this file with no return message. echo \". /etc/profile.d/modules.sh\" | sudo tee -a /etc/bash.bashrc > /dev/null less /usr/local/Modules/init/profile.sh module avail module list Modification can be seen, e.g., function module () { curl -sf -XPOST http://modules-mon.hpc.cam.ac.uk/action -H 'Content-Type: application/json' -d '{ \"username\":\"'$USER'\", \"hostname\":\"'$HOSTNAME'\", \"command\":\"'\"$*\"'\" }' 2>&1 > /dev/null; eval `/usr/bin/modulecmd bash $*` } module load matlab/r2014a matlab $@ Usually the eval line is sufficient.","title":"modules"},{"location":"SYSTEMS/#docker","text":"See https://www.docker.com/ and https://docs.docker.com/ # https://docs.docker.com/engine/install/ubuntu/ for pkg in docker.io docker-doc docker-compose docker-compose-v2 podman-docker containerd runc; do sudo apt-get remove $pkg; done # Add Docker's official GPG key: sudo apt-get update sudo apt-get install ca-certificates curl sudo install -m 0755 -d /etc/apt/keyrings sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc sudo chmod a+r /etc/apt/keyrings/docker.asc # Add the repository to Apt sources: echo \\ \"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \\ $(. 
/etc/os-release && echo \"$VERSION_CODENAME\") stable\" | \\ sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin For instance, git clone https://docs.docker.com/engine/install/ubuntu/ docker run --rm -it neoaggelos/knotify /knotify/bin/rna_analysis --sequence AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC giving CCCAAACCGUAACCCC Sequence: AAAAAACUAAUAGAGGGGGGACUUAGCGCCCCCCAAACCGUAACCCC Structure: ..............((((((.....[[[))))))....]]]...... Energy: -13.800000190734863 Duration: 0.297946 s By default, Docker is only accessible with root privileges (sudo). As a regular user, add your user to the docker group. sudo addgroup --system docker sudo adduser $USER docker newgrp docker sudo snap disable docker sudo snap enable docker For Fedora 33, we have udo dnf -y install dnf-plugins-core sudo dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo sudo dnf install docker-ce docker-ce-cli containerd.io sudo systemctl start docker sudo docker pull eqtlcatalogue/susie-finemapping:v20.08.1 sudo docker run eqtlcatalogue/susie-finemapping:v20.08.1 # remove docker engine # sudo dnf remove docker-ce docker-ce-cli containerd.io # removed all images, containers, and volumes sudo rm -rf /var/lib/docker","title":"docker"},{"location":"SYSTEMS/#openvpn","text":"See https://github.com/OpenVPN/openvpn-gui . Usage example: sudo openvpn --config myconfig.vopn","title":"OpenVPN"},{"location":"SYSTEMS/#time-zone","text":"An example under Fedora, timedatectl list_timezones timedatectl set-timezone Europe/London which is useful to synchronise with files obtained from elsewhere.","title":"Time Zone"},{"location":"SYSTEMS/#web-linux-ssh-terminal","text":"","title":"Web-Linux ssh terminal"},{"location":"SYSTEMS/#shellinabox","text":"sudo apt update sudo apt-get install shellinabox sudo systemctl enable shellinaboxd # restart/start/stop sudo service shellinabox restart sudo systemctl start shellinaboxd sudo systemctl restart shellinaboxd sudo systemctl stop shellinaboxd # alternatives which work sudo /etc/init.d/shellinabox start sudo /etc/init.d/shellinabox stop /etc/init.d/shellinabox status so firefox https://127.0.0.1:4200 & allows for login from firefox.","title":"ShellinaBox"},{"location":"SYSTEMS/#cpolar","text":"Web: https://www.cpolar.com ( https://dashboard.cpolar.com/signup , https://dashboard.cpolar.com/login , https://dashboard.cpolar.com/get-started ) sudo apt install curl curl -L https://www.cpolar.com/static/downloads/install-release-cpolar.sh | sudo bash cpolar sudo systemctl enable cpolar sudo systemctl start cpolar firefox localhost:9200 &","title":"cpolar"},{"location":"SYSTEMS/#featured-articles","text":"Don't like your Linux desktop? Here's how to install an alternative, https://www.zdnet.com/article/dont-like-your-linux-desktop-heres-how-to-install-an-alternative/ .","title":"Featured articles"},{"location":"Utilities/","text":"Web-related notes aria2 Web: https://aria2.github.io/ . The first example, aria2c -c -j10 -i ../urls.txt , specifies that to continue (-c) interrupted download, to use 10 concurrent threads (-j10) and use links in urls.txt . The second example uses - as input: echo https://download.decode.is/s3/download?token=68278faa-0b69-47a0-8fcb-5e7f4057004d&file=10023_32_VDR_VDR.txt.gz | aria2c -i - . 
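For the first form, urls.txt is just a plain list of links, one per line; a small sketch of preparing and using it (the links here are placeholders),
# build urls.txt with one download link per line
cat > urls.txt <<'EOF'
https://example.com/file1.txt.gz
https://example.com/file2.txt.gz
EOF
# resume interrupted transfers (-c) with 10 concurrent downloads (-j10)
aria2c -c -j10 -i urls.txt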
wget Some useful instances are as follows, wget -nd --execute=\"robots = off\" --mirror --convert-links --no-parent --wait=5 http://ftp.ebi.ac.uk/pub/databases/spot/eQTL/csv/GTEx_V8/ge/ wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz.tbi To keep the original timestamp, use -S option. It could be the appropriate way to download directory from a GitHub repository, e.g., git clone https://github.com/statgen/locuszoom.git for https://github.com/statgen/locuszoom/tree/develop/examples/data . lftp This is a sophisticated file transfer program. Non-interactive authentication for both ftp and sftp can be enabled by lftp , which can be installed with # Fedora sudo dnf install lftp # Ubuntu sudo apt install lftp Note in both cases command delimiters are required. Usage example: lftp -c mirror https://ftp.ebi.ac.uk/pub/databases/gwas/summary_statistics/GCST90086001-GCST90087000/ . The options of mirror can be seen with help mirror inside lftp; once done type quit to exit. --- ftp --- We use GENCODE, https://www.gencodegenes.org/human/ ( https://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/ ), to illustrate, The following code is used to download release_43. #!/usr/bin/bash HOST=ftp.ebi.ac.uk USER=anonymous PASS=s@cam.ac.uk FTPURL=ftp://$USER:$PASS@$HOST LCD=. RCD=/pub/databases/gencode/Gencode_human/release_43 lftp $HOST <% set_layout(\"hello\", \"Dash\") app %>% set_layout(div(\"hello\"), \"Dash\") app %>% set_layout(list(div(\"hello\"), \"Dash\")) app %>% set_layout(\"Conditional UI using an if statement: \", if (TRUE) \"rendered\", if (FALSE) \"not rendered\") app %>% set_layout(function() { div(\"Current time: \", Sys.time()) }) app A plotly figure (e.g., https://plotly-r.com/ can be passed on as follows, library(plotly) fig <- plot_ly() # fig <- fig %>% add_trace( ... ) # fig <- fig %>% layout( ... ) library(dash) library(dashCoreComponents) library(dashHtmlComponents) app <- Dash$new() app$layout( htmlDiv( list( dccGraph(figure=fig) ) ) ) app$run_server(debug=TRUE, dev_tools_hot_reload=FALSE) The view requires WebGL, see https://get.webgl.org . djvulibre wget https://github.com/barak/djvulibre/archive/debian/3.5.27.1-14.zip unzip 3.5.27.1-14.zip |more cd djvulibre-debian-3.5.27.1-14/ ./autogen.sh configure --prefix=/rds-d4/user/jhz22/hpc-work/ make make install Google-chrome Installation is possible with wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb sudo dpkg -i google-chrome-stable_current_amd64.deb then the Google repository is also added. Mobile tethering See information from here, https://ee.co.uk/help/help-new/network-and-coverage/tethering-or-sharing-internet/how-do-i-share-my-devices-data-connection-through-a-personal-hotspot-or-tethering . locale This is an example to convert from French encoding, #!/usr/bin/bash if [ ! 
-d ascii ]; then mkdir ascii; fi cd ascii echo $LANG export LANG=en_US.utf8 iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Categorization.csv\" > \"Data Graph Categorization.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Coverage Text Books 1980-2016.csv\" > \"Data Graph Coverage Text Books 1980-2016.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Features.csv\" > \"Data Graph Features.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Data Analyses.R\" > \"R Code Data Analyses.R\" # needs comment on -prev_p[order(-prev_p[,3]),] iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Figures.R\" > \"R Code Figures.R\" R --no-save < \"R Code Data Analyses.R\" R --no-save < \"R Code Figures.R\" cd - as in the following paper, Kossmeier et al. Charting the landscape of graphical displays for meta-analysis and systematic reviews: a comprehensive review, taxonomy, and feature analysis. BMC Medical Research Methodology (2020) 20:26, https://doi.org/10.1186/s12874-020-0911-9 Perhaps a somewhat easier way to do is through RStudio's File --> Save with Encoding and choose UTF-8 . Google document and EndNote It is possible to insert citation from an EndNote library, download as RTF more preferably OpenDocument format, and recover the citations through Tools --> Format paper, e.g., https://libguides.jcu.edu.au/endnote/google-docs sphinx The sequence below follows, https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html. module load python/3.6 virtualenv --system-site-package venv source venv/bin/activate pip install sphinx mkdir docs cd docs sphinx-quickstart make html pip install recommonmark Synchronisation We can employ rsync to synchronise the working node to the web space, e.g., rsync -avrzP $HOME/public_html shell.srcf.net:/public/$HOME Note that it works equally well for backup of files locally. Web site file permission The following commands set read permission to a web site hosted at /public/$HOME/public_html . chmod -R +r /public/$HOME/public_html find /public/$HOME/public_html -type d -exec chmod +x {} \\;","title":"Utilties"},{"location":"Utilities/#web-related-notes","text":"","title":"Web-related notes"},{"location":"Utilities/#aria2","text":"Web: https://aria2.github.io/ . The first example, aria2c -c -j10 -i ../urls.txt , specifies that to continue (-c) interrupted download, to use 10 concurrent threads (-j10) and use links in urls.txt . The second example uses - as input: echo https://download.decode.is/s3/download?token=68278faa-0b69-47a0-8fcb-5e7f4057004d&file=10023_32_VDR_VDR.txt.gz | aria2c -i - .","title":"aria2"},{"location":"Utilities/#wget","text":"Some useful instances are as follows, wget -nd --execute=\"robots = off\" --mirror --convert-links --no-parent --wait=5 http://ftp.ebi.ac.uk/pub/databases/spot/eQTL/csv/GTEx_V8/ge/ wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz wget --no-check-certificate https://omicscience.org/apps/pgwas/data/all.grch37.tabix.gz.tbi To keep the original timestamp, use -S option. It could be the appropriate way to download directory from a GitHub repository, e.g., git clone https://github.com/statgen/locuszoom.git for https://github.com/statgen/locuszoom/tree/develop/examples/data .","title":"wget"},{"location":"Utilities/#lftp","text":"This is a sophisticated file transfer program. 
Non-interactive authentication for both ftp and sftp can be enabled by lftp , which can be installed with # Fedora sudo dnf install lftp # Ubuntu sudo apt install lftp Note in both cases command delimiters are required. Usage example: lftp -c mirror https://ftp.ebi.ac.uk/pub/databases/gwas/summary_statistics/GCST90086001-GCST90087000/ . The options of mirror can be seen with help mirror inside lftp; once done type quit to exit.","title":"lftp"},{"location":"Utilities/#-ftp-","text":"We use GENCODE, https://www.gencodegenes.org/human/ ( https://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/ ), to illustrate, The following code is used to download release_43. #!/usr/bin/bash HOST=ftp.ebi.ac.uk USER=anonymous PASS=s@cam.ac.uk FTPURL=ftp://$USER:$PASS@$HOST LCD=. RCD=/pub/databases/gencode/Gencode_human/release_43 lftp $HOST <% set_layout(\"hello\", \"Dash\") app %>% set_layout(div(\"hello\"), \"Dash\") app %>% set_layout(list(div(\"hello\"), \"Dash\")) app %>% set_layout(\"Conditional UI using an if statement: \", if (TRUE) \"rendered\", if (FALSE) \"not rendered\") app %>% set_layout(function() { div(\"Current time: \", Sys.time()) }) app A plotly figure (e.g., https://plotly-r.com/ can be passed on as follows, library(plotly) fig <- plot_ly() # fig <- fig %>% add_trace( ... ) # fig <- fig %>% layout( ... ) library(dash) library(dashCoreComponents) library(dashHtmlComponents) app <- Dash$new() app$layout( htmlDiv( list( dccGraph(figure=fig) ) ) ) app$run_server(debug=TRUE, dev_tools_hot_reload=FALSE) The view requires WebGL, see https://get.webgl.org .","title":"--- R ---"},{"location":"Utilities/#djvulibre","text":"wget https://github.com/barak/djvulibre/archive/debian/3.5.27.1-14.zip unzip 3.5.27.1-14.zip |more cd djvulibre-debian-3.5.27.1-14/ ./autogen.sh configure --prefix=/rds-d4/user/jhz22/hpc-work/ make make install","title":"djvulibre"},{"location":"Utilities/#google-chrome","text":"Installation is possible with wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb sudo dpkg -i google-chrome-stable_current_amd64.deb then the Google repository is also added.","title":"Google-chrome"},{"location":"Utilities/#mobile-tethering","text":"See information from here, https://ee.co.uk/help/help-new/network-and-coverage/tethering-or-sharing-internet/how-do-i-share-my-devices-data-connection-through-a-personal-hotspot-or-tethering .","title":"Mobile tethering"},{"location":"Utilities/#locale","text":"This is an example to convert from French encoding, #!/usr/bin/bash if [ ! -d ascii ]; then mkdir ascii; fi cd ascii echo $LANG export LANG=en_US.utf8 iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Categorization.csv\" > \"Data Graph Categorization.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Coverage Text Books 1980-2016.csv\" > \"Data Graph Coverage Text Books 1980-2016.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"Data Graph Features.csv\" > \"Data Graph Features.csv\" iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Data Analyses.R\" > \"R Code Data Analyses.R\" # needs comment on -prev_p[order(-prev_p[,3]),] iconv -c -f UTF-8 -t \"ASCII//TRANSLIT\" ../\"R Code Figures.R\" > \"R Code Figures.R\" R --no-save < \"R Code Data Analyses.R\" R --no-save < \"R Code Figures.R\" cd - as in the following paper, Kossmeier et al. Charting the landscape of graphical displays for meta-analysis and systematic reviews: a comprehensive review, taxonomy, and feature analysis. 
BMC Medical Research Methodology (2020) 20:26, https://doi.org/10.1186/s12874-020-0911-9 Perhaps a somewhat easier way to do is through RStudio's File --> Save with Encoding and choose UTF-8 .","title":"locale"},{"location":"Utilities/#google-document-and-endnote","text":"It is possible to insert citation from an EndNote library, download as RTF more preferably OpenDocument format, and recover the citations through Tools --> Format paper, e.g., https://libguides.jcu.edu.au/endnote/google-docs","title":"Google document and EndNote"},{"location":"Utilities/#sphinx","text":"The sequence below follows, https://docs.readthedocs.io/en/stable/intro/getting-started-with-sphinx.html. module load python/3.6 virtualenv --system-site-package venv source venv/bin/activate pip install sphinx mkdir docs cd docs sphinx-quickstart make html pip install recommonmark","title":"sphinx"},{"location":"Utilities/#synchronisation","text":"We can employ rsync to synchronise the working node to the web space, e.g., rsync -avrzP $HOME/public_html shell.srcf.net:/public/$HOME Note that it works equally well for backup of files locally.","title":"Synchronisation"},{"location":"Utilities/#web-site-file-permission","text":"The following commands set read permission to a web site hosted at /public/$HOME/public_html . chmod -R +r /public/$HOME/public_html find /public/$HOME/public_html -type d -exec chmod +x {} \\;","title":"Web site file permission"},{"location":"Flask/","text":"ChatGPT under Flask As ususal, this is furnished with app.py and config.py with templates in its simplest form. Flask/ \u251c\u2500\u2500 app.py \u251c\u2500\u2500 config.py \u251c\u2500\u2500 README.md (this file) \u2514\u2500\u2500 templates \u2514\u2500\u2500 index.html which uses environment variable from export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) . By default, python app.py will enable http://127.0.0.1:5000 : $ python app.py * Serving Flask app 'app' * Debug mode: on WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. * Running on http://127.0.0.1:5000 Press CTRL+C to quit * Restarting with watchdog (inotify) * Debugger is active! * Debugger PIN: 711-120-470 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - Considerable coverage has been given on Flask, e.g., https://cambridge-ceu.github.io/GitHub-matters/Flask/ , and WSGI is touched upon here, https://cambridge-ceu.github.io/CEU-matters/Flask/ .","title":"Flask"},{"location":"Flask/#chatgpt-under-flask","text":"As ususal, this is furnished with app.py and config.py with templates in its simplest form. Flask/ \u251c\u2500\u2500 app.py \u251c\u2500\u2500 config.py \u251c\u2500\u2500 README.md (this file) \u2514\u2500\u2500 templates \u2514\u2500\u2500 index.html which uses environment variable from export OPENAI_API_KEY=$(grep sk ~/doc/OpenAI) . By default, python app.py will enable http://127.0.0.1:5000 : $ python app.py * Serving Flask app 'app' * Debug mode: on WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. * Running on http://127.0.0.1:5000 Press CTRL+C to quit * Restarting with watchdog (inotify) * Debugger is active! 
* Debugger PIN: 711-120-470 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - 127.0.0.1 - - [05/Dec/2024 21:48:34] \"GET / HTTP/1.1\" 200 - Considerable coverage has been given on Flask, e.g., https://cambridge-ceu.github.io/GitHub-matters/Flask/ , and WSGI is touched upon here, https://cambridge-ceu.github.io/CEU-matters/Flask/ .","title":"ChatGPT under Flask"}]} \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz index 69ac4eb0480a72df6d1b18c7a6e547d30dcba4d7..12ee2f1df7cb1b5ec7e0cdc1300e896738b8f01d 100644 GIT binary patch delta 12 Tcmb=gXOr*d;ArES$W{pe7LEg9 delta 12 Tcmb=gXOr*d;CR6`k*yK{7;po@