diff --git a/common/types.ts b/common/types.ts new file mode 100644 index 000000000..4df008a9c --- /dev/null +++ b/common/types.ts @@ -0,0 +1,3 @@ +import { LinkProps as DXLinkProps } from "@databiosphere/findable-ui/lib/components/Links/components/Link/link"; + +export type LinkProps = Omit & { url: string }; diff --git a/components/Events/common/utils.ts b/components/Events/common/utils.ts index 1ab21d401..86aaa1a18 100644 --- a/components/Events/common/utils.ts +++ b/components/Events/common/utils.ts @@ -3,6 +3,7 @@ import { buildMomentField, convertDateToMoment, getFrontmatterByPaths, + isFrontmatterEvent, } from "../../../content/utils"; import { getDocsDirectory, @@ -141,10 +142,14 @@ function processFrontmatterURL(path?: string): string | null { * @returns frontmatter for an event article. */ export function processEventFrontmatter( - frontmatter: FrontmatterEvent -): FrontmatterEvent { + frontmatter: Frontmatter | undefined +): FrontmatterEvent | undefined { + if (!frontmatter) return; + if (!isFrontmatterEvent(frontmatter)) return; return { ...processFrontmatter(["", frontmatter]), + enableNavigation: false, + enableOutline: false, formattedSessions: formatSessions( frontmatter.sessions, frontmatter.timezone diff --git a/components/Home/components/Section/components/SectionAnalysisPortals/common/utils.ts b/components/Home/components/Section/components/SectionAnalysisPortals/common/utils.ts index 2f4f90d99..2785baa81 100644 --- a/components/Home/components/Section/components/SectionAnalysisPortals/common/utils.ts +++ b/components/Home/components/Section/components/SectionAnalysisPortals/common/utils.ts @@ -23,7 +23,7 @@ export function buildAnalysisPortalCards(browserURL: string): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/introduction/intro-to-terra", + url: "/learn/run-analyses-workflows/intro-to-terra", }, ], media: { @@ -61,7 +61,7 @@ export function buildAnalysisPortalCards(browserURL: string): 
SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/introduction/intro-to-dockstore", + url: "/learn/run-analyses-workflows/intro-to-dockstore", }, ], media: { @@ -93,7 +93,7 @@ export function buildAnalysisPortalCards(browserURL: string): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/interactive-analysis/getting-started-with-bioconductor", + url: "/learn/run-interactive-analyses/getting-started-with-bioconductor", }, ], media: { @@ -109,7 +109,7 @@ export function buildAnalysisPortalCards(browserURL: string): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/interactive-analysis/getting-started-with-galaxy", + url: "/learn/run-interactive-analyses/getting-started-with-galaxy", }, ], media: { @@ -125,7 +125,7 @@ export function buildAnalysisPortalCards(browserURL: string): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/interactive-analysis/getting-started-with-jupyter-notebooks", + url: "/learn/run-interactive-analyses/getting-started-with-jupyter-notebooks", }, ], media: { diff --git a/components/Home/components/Section/components/SectionDatasets/sectionDatasets.tsx b/components/Home/components/Section/components/SectionDatasets/sectionDatasets.tsx index 1d64849ff..eab889e0e 100644 --- a/components/Home/components/Section/components/SectionDatasets/sectionDatasets.tsx +++ b/components/Home/components/Section/components/SectionDatasets/sectionDatasets.tsx @@ -11,8 +11,7 @@ import { } from "./sectionDatasets.styles"; const CONSORTIA_ROADMAP = "/consortia"; -const CONTRIBUTE_DATA = - "/learn/data-submitters/submission-guide/data-submitters-overview"; +const CONTRIBUTE_DATA = "/learn/submit-data"; const EXPLORE_DATASETS = "/data/consortia"; interface SectionDatasetsProps { diff --git a/components/Home/components/Section/components/SectionHero/common/utils.ts 
b/components/Home/components/Section/components/SectionHero/common/utils.ts index d4938c404..54558d4b5 100644 --- a/components/Home/components/Section/components/SectionHero/common/utils.ts +++ b/components/Home/components/Section/components/SectionHero/common/utils.ts @@ -55,7 +55,7 @@ export function buildCarouselCards(): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/videos/anvil-videos#sharing-on-anvil", + url: "/learn/watch-videos-and-tutorials/anvil-videos", }, ], text: "Our short video series shows how AnVIL improves collaborative science for different researcher roles.", @@ -114,7 +114,7 @@ export function buildCarouselCards(): SectionCard[] { { label: ACTION_LABEL.LEARN_MORE, target: ANCHOR_TARGET.SELF, - url: "/learn/interactive-analysis/getting-started-with-galaxy", + url: "/learn/run-interactive-analyses/getting-started-with-galaxy", }, ], text: "AnVIL has access to full Galaxy capabilities, a computational workbench used by thousands of scientists to analyze biomedical data.", diff --git a/components/Home/components/Section/components/SectionHero/sectionHero.tsx b/components/Home/components/Section/components/SectionHero/sectionHero.tsx index 07b6f4bdb..f3a75e578 100644 --- a/components/Home/components/Section/components/SectionHero/sectionHero.tsx +++ b/components/Home/components/Section/components/SectionHero/sectionHero.tsx @@ -10,8 +10,8 @@ import { Subhead, } from "./sectionHero.styles"; -const GET_STARTED = "/learn"; -const LEARN_MORE = "/overview"; +const GET_STARTED = "/learn/get-started"; +const LEARN_MORE = "/learn"; export const SectionHero = (): JSX.Element => { return ( diff --git a/components/Home/components/Section/components/SectionWorkspaces/sectionWorkspaces.tsx b/components/Home/components/Section/components/SectionWorkspaces/sectionWorkspaces.tsx index ad351f8ec..e7356563c 100644 --- a/components/Home/components/Section/components/SectionWorkspaces/sectionWorkspaces.tsx +++ 
b/components/Home/components/Section/components/SectionWorkspaces/sectionWorkspaces.tsx @@ -3,7 +3,8 @@ import { Section, SectionSubtitle, SectionTitle } from "../../section.styles"; import { Workspaces } from "./components/Workspaces/workspaces"; import { Headline, SectionLayout } from "./sectionWorkspaces.styles"; -const EXPLORE_WORKSPACES = "/learn/analysis-workflows/using-example-workspaces"; +const EXPLORE_WORKSPACES = + "/learn/run-analyses-workflows/using-example-workspaces"; export const SectionWorkspaces = (): JSX.Element => { return ( diff --git a/components/Layout/components/Content/components/SupportForum/supportForum.styles.ts b/components/Layout/components/Content/components/SupportForum/supportForum.styles.ts new file mode 100644 index 000000000..ebddf1bbc --- /dev/null +++ b/components/Layout/components/Content/components/SupportForum/supportForum.styles.ts @@ -0,0 +1,10 @@ +import { RoundedPaper } from "@databiosphere/findable-ui/lib/components/common/Paper/paper.styles"; +import styled from "@emotion/styled"; + +export const StyledPaper = styled(RoundedPaper)` + display: grid; + margin-top: 64px; + padding: 40px 32px; + place-items: center; + text-align: center; +`; diff --git a/components/Layout/components/Content/components/SupportForum/supportForum.tsx b/components/Layout/components/Content/components/SupportForum/supportForum.tsx new file mode 100644 index 000000000..25a77c785 --- /dev/null +++ b/components/Layout/components/Content/components/SupportForum/supportForum.tsx @@ -0,0 +1,40 @@ +import { + ANCHOR_TARGET, + REL_ATTRIBUTE, +} from "@databiosphere/findable-ui/lib/components/Links/common/entities"; +import { + TEXT_BODY_LARGE_400_2_LINES, + TEXT_HEADING, +} from "@databiosphere/findable-ui/lib/theme/common/typography"; +import { Button, Typography } from "@mui/material"; +import { PATH_PARAMETERS } from "../../../../../../common/constants"; +import { StyledPaper } from "./supportForum.styles"; + +export const SupportForum = (): 
JSX.Element => { + return ( + + + AnVIL Support Forum + + + Be sure to check out the AnVIL Community for support, plus tips & tricks + from our users and much more. + + + + ); +}; diff --git a/components/Layout/components/Content/content.styles.ts b/components/Layout/components/Content/content.styles.ts index 00436e4bf..275cae5fc 100644 --- a/components/Layout/components/Content/content.styles.ts +++ b/components/Layout/components/Content/content.styles.ts @@ -90,6 +90,10 @@ const muiButtonContainedPrimary = css` } `; +// See https://github.com/emotion-js/emotion/issues/1105. +// See https://github.com/emotion-js/emotion/releases/tag/%40emotion%2Fcache%4011.10.2. +const ignoreSsrWarning = + "/* emotion-disable-server-rendering-unsafe-selector-warning-please-do-not-use-this-the-warning-exists-for-a-reason */"; export const Content = styled.div` h1, h2, @@ -146,4 +150,8 @@ export const Content = styled.div` ${image}; ${muiAlert}; ${muiButtonContainedPrimary}; + + > *:first-child:not(style) ${ignoreSsrWarning} { + margin-top: 0; + } `; diff --git a/components/Layout/components/Header/components/Actions/actions.tsx b/components/Layout/components/Header/components/Actions/actions.tsx new file mode 100644 index 000000000..e78e17dc9 --- /dev/null +++ b/components/Layout/components/Header/components/Actions/actions.tsx @@ -0,0 +1,11 @@ +import { Button } from "@mui/material"; +import Link from "next/link"; +import { BUTTON_PROPS } from "./constants"; + +export const Actions = (): JSX.Element => { + return ( + + ); +}; diff --git a/components/Layout/components/Header/components/Actions/constants.ts b/components/Layout/components/Header/components/Actions/constants.ts new file mode 100644 index 000000000..1331b01e4 --- /dev/null +++ b/components/Layout/components/Header/components/Actions/constants.ts @@ -0,0 +1,7 @@ +import { ButtonProps } from "@mui/material"; + +export const BUTTON_PROPS: Partial = { + color: "primary", + size: "medium", + variant: "contained", +}; diff 
--git a/components/Layout/components/Main/main.styles.ts b/components/Layout/components/Main/main.styles.ts new file mode 100644 index 000000000..4b8a954a0 --- /dev/null +++ b/components/Layout/components/Main/main.styles.ts @@ -0,0 +1,6 @@ +import { Main as DXMain } from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/components/Main/main"; +import styled from "@emotion/styled"; + +export const StyledMain = styled(DXMain)` + flex-direction: column; +`; diff --git a/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.styles.ts b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.styles.ts new file mode 100644 index 000000000..a8f0999cf --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.styles.ts @@ -0,0 +1,51 @@ +import { primaryMain } from "@databiosphere/findable-ui/lib/styles/common/mixins/colors"; +import styled from "@emotion/styled"; +import { mediaTabletUp } from "@databiosphere/findable-ui/lib/styles/common/mixins/breakpoints"; + +export const GroupOverview = styled.div` + .MuiDivider-root, + .MuiTypography-text-heading { + grid-column: 1 / -1; + } + + .MuiDivider-root { + margin: 32px 0; + } + + .MuiTypography-text-heading { + line-height: 34px; + } +`; + +export const GroupLinks = styled.div` + margin-top: 8px; + + ${mediaTabletUp} { + display: grid; + gap: 0 64px; + grid-auto-columns: 1fr; + + ul + ul { + grid-column: 2; + } + } +`; + +export const UnorderedList = styled("ul")` + list-style-position: inside; + padding-left: 0; + + li { + margin: 4px 0; + padding-left: 24px; // required for list-style-position: inside; allows for market to be positioned inside the list item. + text-indent: -15px; // required for list-style-position: inside; centering marker; half of the 24px width and half marker width @ 6px. 
+ + > * { + margin-left: -6px; // required for list-style-position: inside; assists with vertical alignment of list item; difference between indent and padding adjustments and half of the marker width. + } + + &::marker { + color: ${primaryMain}; + } + } +`; diff --git a/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.tsx b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.tsx new file mode 100644 index 000000000..b13bdb8f5 --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview.tsx @@ -0,0 +1,50 @@ +import { Link } from "@databiosphere/findable-ui/lib/components/Links/components/Link/link"; +import { TEXT_HEADING } from "@databiosphere/findable-ui/lib/theme/common/typography"; +import { Divider } from "@mui/material"; +import { Fragment } from "react"; +import { Heading } from "../../../../../../../common/Typography/components/Heading/heading"; +import { + GroupOverview, + GroupLinks, + UnorderedList, +} from "./sectionOverview.styles"; +import { SectionOverviewProps } from "./types"; +import { splitLinks } from "./utils"; + +export const SectionOverview = ({ + overview, +}: SectionOverviewProps): JSX.Element | null => { + if (!overview) return null; + return ( + + {overview.map(({ label, links }, groupIndex) => { + return ( + links.length > 0 && ( + + {groupIndex > 0 && } + + + {splitLinks(links).map( + (links, linksIndex) => + links.length > 0 && ( + + {links.map((linkProps, listIndex) => ( +
  • + +
  • + ))} +
    + ) + )} +
    +
    + ) + ); + })} +
    + ); +}; diff --git a/components/Layout/components/Section/components/SectionContent/components/SectionOverview/types.ts b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/types.ts new file mode 100644 index 000000000..04de68302 --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/types.ts @@ -0,0 +1,14 @@ +import { LinkProps } from "../../../../../../../../common/types"; + +export interface Overview { + label: string; + links: OverviewLink[]; +} + +export type OverviewLink = string | LinkProps; + +export interface SectionOverviewProps { + overview: (Omit & { + links: Exclude[]; + })[]; +} diff --git a/components/Layout/components/Section/components/SectionContent/components/SectionOverview/utils.ts b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/utils.ts new file mode 100644 index 000000000..821762e2c --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/components/SectionOverview/utils.ts @@ -0,0 +1,186 @@ +import { OutlineItem } from "@databiosphere/findable-ui/lib/components/Layout/components/Outline/outline"; +import { + isClientSideNavigation, + isURLString, +} from "@databiosphere/findable-ui/lib/components/Links/common/utils"; +import { + Frontmatter, + FrontmatterOverview, +} from "../../../../../../../../content/entities"; +import { isFrontmatterOverview } from "../../../../../../../../content/typeGuards"; +import { slugifyHeading } from "../../../../../../../../plugins/common/utils"; +import { OverviewLink } from "./types"; +import { LinkProps } from "../../../../../../../../common/types"; + +const MAX_ROWS = 3; +const OVERVIEW_OUTLINE_DEPTH = 2; + +/** + * Maps an overview link to LinkProps. + * A string link is converted to a LinkProps with the title taken from the frontmatter. 
+ * An undefined value is returned if the link is not found in the frontmatter, or if the title is not found. + * @param section - Section. + * @param link - Overview link. + * @param frontmatters - Paths with frontmatter. + * @returns link props. + */ +function getOverviewLink( + section: string, + link: OverviewLink, + frontmatters: [string, Frontmatter][] +): LinkProps | undefined { + // Grab the configured URL from the link. + const url = getOverviewLinkUrl(link); + if (!url) return; + // Grab the configured label from the link. + const label = getOverviewLinkLabel(link); + // Handle external links. + if (!isClientSideNavigation(url)) { + if (typeof link === "string") return; + return { label, url }; + } + // Otherwise, handle internal links. + // Find the corresponding frontmatter for the link. + const pathFrontmatter = getPathFrontmatter(section, url, frontmatters); + if (!pathFrontmatter) return; + // Extract the title from the frontmatter. + const [, { title }] = pathFrontmatter; + if (!title) return; + // Return the link props. + return { + label: label || title, + url, + }; +} + +/** + * Gets the label, if configured, from an overview link. + * OverviewLink can be a string or LinkProps. + * @param link - Overview link. + * @returns overview link label. + */ +function getOverviewLinkLabel( + link: OverviewLink +): LinkProps["label"] | undefined { + if (typeof link === "string") return; + return link.label; +} + +/** + * Gets the URL configured from an overview link. + * OverviewLink can be a string or LinkProps. + * @param link - Overview link. + * @returns overview link URL. + */ +function getOverviewLinkUrl(link: OverviewLink): string { + if (typeof link === "string") return link; + if (isURLString(link.url)) return link.url; + return ""; +} + +/** + * Finds the path with frontmatter tuple for a given path. + * Compares the given link with the path in the path with frontmatter tuples. + * @param section - Section. + * @param link - Link. 
+ * @param frontmatters - Path frontmatter tuples. + * @returns path with frontmatter tuple. + */ +function getPathFrontmatter( + section: string, + link: string, + frontmatters: [string, Frontmatter][] +): [string, Frontmatter] | undefined { + const regex = new RegExp(`^\\/${section}\\/`); + return frontmatters.find(([path]) => path === link.replace(regex, "")); +} + +/** + * Maps the overview to an outline. + * @param overview - Overview. + * @returns outline. + */ +export function mapFrontmatterOutline( + overview: FrontmatterOverview["overview"] +): FrontmatterOverview["outline"] { + return overview.reduce((acc, { label, links }) => { + if (links.length > 0) { + acc.push({ + depth: OVERVIEW_OUTLINE_DEPTH, + hash: slugifyHeading(label), + value: label, + }); + } + return acc; + }, [] as OutlineItem[]); +} + +/** + * Maps the frontmatter overview to an overview with links mapped to LinkProps. + * @param section - Section. + * @param frontmatter - Frontmatter. + * @param frontmatters - Paths with frontmatter. + * @returns overview. + */ +export function mapFrontmatterOverview( + section: string, + frontmatter: FrontmatterOverview, + frontmatters: [string, Frontmatter][] +): FrontmatterOverview["overview"] { + return frontmatter.overview.map(({ label, links }) => { + return { label, links: parseOverviewLinks(section, links, frontmatters) }; + }); +} + +/** + * Maps overview links to LinkProps for display in the Link component. + * @param section - Section. + * @param links - Overview links. + * @param frontmatters - Paths with frontmatter. + * @returns link props. + */ +function parseOverviewLinks( + section: string, + links: OverviewLink[], + frontmatters: [string, Frontmatter][] +): LinkProps[] { + return links.reduce((acc, link) => { + const overviewLink = getOverviewLink(section, link, frontmatters); + if (overviewLink) { + // Only add the link if the title exists; confirming the overview configured the link correctly. 
+ acc.push(overviewLink); + } + return acc; + }, [] as LinkProps[]); +} + +/** + * Returns parsed overview related frontmatter. + * @param section - Section. + * @param frontmatter - Frontmatter. + * @param frontmatters - Paths with frontmatter. + * @returns frontmatter. + */ +export function processOverviewFrontmatter( + section: string = "", + frontmatter: Frontmatter | undefined, + frontmatters: [string, Frontmatter][] +): Frontmatter | undefined { + if (!frontmatter) return; + if (!isFrontmatterOverview(frontmatter)) return frontmatter; + const overview = mapFrontmatterOverview(section, frontmatter, frontmatters); + const outline = mapFrontmatterOutline(overview); + return { ...frontmatter, outline, overview }; +} + +/** + * Splits group overview links into two arrays suitable for a two-column layout. + * @param links - Section overview links. + * @returns section overview links, evenly split into two arrays. + */ +export function splitLinks( + links: Exclude[] +): Exclude[][] { + const sliceIndex = Math.max(MAX_ROWS, Math.ceil(links.length / 2)); + return [links.slice(0, sliceIndex), links.slice(sliceIndex)]; +} diff --git a/components/Layout/components/Section/components/SectionContent/sectionContent.styles.ts b/components/Layout/components/Section/components/SectionContent/sectionContent.styles.ts new file mode 100644 index 000000000..b18eef051 --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/sectionContent.styles.ts @@ -0,0 +1,35 @@ +import { + Content, + ContentGrid, + OutlineGrid, + Positioner, +} from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/contentLayout.styles"; +import { mediaTabletDown } from "@databiosphere/findable-ui/lib/styles/common/mixins/breakpoints"; +import styled from "@emotion/styled"; + +export const StyledSection = styled.section` + flex: 1; + width: 100%; +`; + +export const StyledContentGrid = styled(ContentGrid)` + padding: 64px 0; +`; + +export const StyledContent = 
styled(Content)` + padding: 0 40px; + + ${mediaTabletDown} { + padding: 0 16px; + } +`; + +export const StyledOutlineGrid = styled(OutlineGrid)` + padding: 64px 0; +`; + +export const StyledPositioner = styled(Positioner)` + max-height: ${({ headerHeight }) => `calc(100vh - ${headerHeight}px)`}; + padding-top: 0; + top: ${({ headerHeight }) => `${headerHeight}px`}; +`; diff --git a/components/Layout/components/Section/components/SectionContent/sectionContent.tsx b/components/Layout/components/Section/components/SectionContent/sectionContent.tsx new file mode 100644 index 000000000..49f7b4893 --- /dev/null +++ b/components/Layout/components/Section/components/SectionContent/sectionContent.tsx @@ -0,0 +1,52 @@ +import { PANEL_BACKGROUND_COLOR } from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/common/entities"; +import { + ContentLayout, + Outline, +} from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/contentLayout.styles"; +import { useLayoutState } from "@databiosphere/findable-ui/lib/hooks/useLayoutState"; +import { ContentViewProps } from "@databiosphere/findable-ui/lib/views/ContentView/contentView"; +import { StaticProps } from "../../../../../../content/entities"; +import { + StyledContent, + StyledContentGrid, + StyledOutlineGrid, + StyledPositioner, + StyledSection, +} from "./sectionContent.styles"; + +export const SectionContent = ({ + content, + outline, + slug, +}: Omit & + Pick): JSX.Element => { + const { + layoutState: { headerHeight }, + } = useLayoutState(); + return ( + + + + {content} + + {outline && ( + + + {outline} + + + )} + + + ); +}; diff --git a/components/Layout/components/Section/components/SectionHero/sectionHero.styles.ts b/components/Layout/components/Section/components/SectionHero/sectionHero.styles.ts new file mode 100644 index 000000000..d4e1f28a9 --- /dev/null +++ b/components/Layout/components/Section/components/SectionHero/sectionHero.styles.ts @@ -0,0 +1,82 @@ +import { 
+ Content, + ContentLayout, +} from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/contentLayout.styles"; +import { + media1366Up, + mediaDesktopSmallUp, + mediaTabletDown, +} from "@databiosphere/findable-ui/lib/styles/common/mixins/breakpoints"; +import { + inkLight, + smokeLightest, +} from "@databiosphere/findable-ui/lib/styles/common/mixins/colors"; +import { + textBody4002Lines, + textHeadingXLarge, +} from "@databiosphere/findable-ui/lib/styles/common/mixins/fonts"; +import { css } from "@emotion/react"; +import styled from "@emotion/styled"; + +interface Props { + headerHeight: number; +} + +export const StyledSection = styled("section", { + shouldForwardProp: (props) => props !== "headerHeight", +})` + background-color: ${smokeLightest}; + padding-top: ${({ headerHeight }) => headerHeight}px; + width: 100%; +`; + +export const SectionLayout = styled(ContentLayout)` + grid-template-areas: "hero"; + + ${mediaDesktopSmallUp} { + ${({ hasNavigation }) => + hasNavigation + ? css` + grid-template-areas: ". hero"; + ` + : css` + grid-template-areas: "hero"; + `}; + } + + ${media1366Up} { + grid-template-areas: ". 
hero ."; + } + + .MuiDivider-root { + grid-column: 1 / -1; + } +`; + +export const Headline = styled(Content)` + grid-area: hero; + padding-bottom: 40px; + padding-top: 40px; + width: 100%; + + ${mediaTabletDown} { + padding-bottom: 40px; + padding-top: 40px; + width: calc(100% - 32px); + } + + .MuiBreadcrumbs-root { + margin: 0 0 4px; + } +`; + +export const PageTitle = styled.h1` + ${textHeadingXLarge}; + margin: 0 auto; +`; + +export const PageSubTitle = styled.div` + ${textBody4002Lines}; + color: ${inkLight}; + margin: 0 auto; +`; diff --git a/components/Layout/components/Section/components/SectionHero/sectionHero.tsx b/components/Layout/components/Section/components/SectionHero/sectionHero.tsx new file mode 100644 index 000000000..656f90fce --- /dev/null +++ b/components/Layout/components/Section/components/SectionHero/sectionHero.tsx @@ -0,0 +1,39 @@ +import { Breadcrumbs } from "@databiosphere/findable-ui/lib/components/common/Breadcrumbs/breadcrumbs"; +import { PANEL_BACKGROUND_COLOR } from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/common/entities"; +import { useLayoutState } from "@databiosphere/findable-ui/lib/hooks/useLayoutState"; +import { StaticProps } from "../../../../../../content/entities"; +import { SectionDivider } from "../../../../../Home/components/Section/section.styles"; +import { + Headline, + PageSubTitle, + PageTitle, + SectionLayout, + StyledSection, +} from "./sectionHero.styles"; +import { BaseComponentProps } from "@databiosphere/findable-ui/lib/components/types"; +import { Props } from "./types"; + +export const SectionHero = ({ + className, + frontmatter: { breadcrumbs, subTitle, title }, + StyledHeadline = Headline, +}: BaseComponentProps & Props & StaticProps): JSX.Element => { + const { + layoutState: { headerHeight }, + } = useLayoutState(); + return ( + + + + {breadcrumbs && } + {title} + {subTitle && {subTitle}} + + + + + ); +}; diff --git 
a/components/Layout/components/Section/components/SectionHero/types.ts b/components/Layout/components/Section/components/SectionHero/types.ts new file mode 100644 index 000000000..93539d615 --- /dev/null +++ b/components/Layout/components/Section/components/SectionHero/types.ts @@ -0,0 +1,5 @@ +import { ElementType } from "react"; + +export interface Props { + StyledHeadline?: ElementType; +} diff --git a/components/Layout/components/Section/section.styles.ts b/components/Layout/components/Section/section.styles.ts new file mode 100644 index 000000000..f10f54a64 --- /dev/null +++ b/components/Layout/components/Section/section.styles.ts @@ -0,0 +1,15 @@ +import styled from "@emotion/styled"; +import { white } from "@databiosphere/findable-ui/lib/styles/common/mixins/colors"; + +export const StyledSection = styled.section` + background-color: ${white}; + flex: 1; + width: 100%; +`; + +export const SectionLayout = styled.div` + box-sizing: content-box; + margin: 0 auto; + max-width: 1024px; + padding: 64px 16px; +`; diff --git a/components/Learn/components/Workspaces/contants.ts b/components/Learn/components/Workspaces/contants.ts new file mode 100644 index 000000000..dd295ead2 --- /dev/null +++ b/components/Learn/components/Workspaces/contants.ts @@ -0,0 +1,8 @@ +import { Grid2Props } from "@mui/material"; + +export const GRID2_PROPS: Grid2Props = { + columnGap: 4, + container: true, + direction: "column", + sx: { my: 6 }, +}; diff --git a/components/Learn/components/Workspaces/workspaces.tsx b/components/Learn/components/Workspaces/workspaces.tsx new file mode 100644 index 000000000..f6329a96f --- /dev/null +++ b/components/Learn/components/Workspaces/workspaces.tsx @@ -0,0 +1,14 @@ +import { CARDS as WORKSPACES } from "../../../Home/components/Section/components/SectionWorkspaces/common/content"; +import { Card } from "../../../Home/components/Section/components/SectionWorkspaces/components/Workspaces/components/Card/card"; +import { Grid2 } from "@mui/material"; 
+import { GRID2_PROPS } from "./contants"; + +export const Workspaces = (): JSX.Element => { + return ( + + {WORKSPACES.map((card, index) => ( + + ))} + + ); +}; diff --git a/components/Learn/utils.ts b/components/Learn/utils.ts new file mode 100644 index 000000000..ad0827821 --- /dev/null +++ b/components/Learn/utils.ts @@ -0,0 +1,35 @@ +import { Frontmatter } from "../../content/entities"; +import { ROUTES } from "../../routes/constants"; + +/** + * Returns the learn page related breadcrumbs. + * First crumb is always "Learn". + * Last crumb is the title of the page. + * If the frontmatter has breadcrumbs, they are added in between. + * @param frontmatter - Frontmatter. + * @returns breadcrumbs. + */ +function buildBreadcrumbs( + frontmatter: Frontmatter +): Frontmatter["breadcrumbs"] { + return [ + { path: ROUTES.LEARN, text: "Learn" }, + ...(frontmatter.breadcrumbs ?? []), + { path: "", text: frontmatter.title }, + ]; +} + +/** + * Processes the learn page related frontmatter. + * @param frontmatter - Frontmatter. + * @returns learn page related frontmatter. 
+ */ +export function processFrontmatter( + frontmatter: Frontmatter | undefined +): Frontmatter | undefined { + if (!frontmatter) return; + return { + ...frontmatter, + breadcrumbs: buildBreadcrumbs(frontmatter), + }; +} diff --git a/components/News/common/utils.ts b/components/News/common/utils.ts index 7517414d7..959a06104 100644 --- a/components/News/common/utils.ts +++ b/components/News/common/utils.ts @@ -62,6 +62,8 @@ export function processFrontmatter( return { ...frontmatter, date: processFrontmatterDate(frontmatter), + enableNavigation: false, + enableOutline: false, url: processFrontmatterURL(path), }; } diff --git a/components/common/Card/components/CTACard/constants.ts b/components/common/Card/components/CTACard/constants.ts new file mode 100644 index 000000000..3e4a67691 --- /dev/null +++ b/components/common/Card/components/CTACard/constants.ts @@ -0,0 +1,18 @@ +import { CardProps, SvgIconProps } from "@mui/material"; +import { TEXT_BODY_400_2_LINES } from "@databiosphere/findable-ui/lib/theme/common/typography"; +import { RoundedPaper } from "@databiosphere/findable-ui/lib/components/common/Paper/paper.styles"; + +export const CARD_PROPS: CardProps = { + component: RoundedPaper, +}; + +export const SVG_ICON_PROPS: SvgIconProps = { + color: "primary", + fontSize: "small", + sx: { gridColumn: 2, gridRow: 1, p: 2.5 }, +}; + +export const TYPOGRAPHY_PROPS = { + color: "ink.light", + variant: TEXT_BODY_400_2_LINES, +}; diff --git a/components/common/Card/components/CTACard/ctaCard.styles.ts b/components/common/Card/components/CTACard/ctaCard.styles.ts new file mode 100644 index 000000000..825373d22 --- /dev/null +++ b/components/common/Card/components/CTACard/ctaCard.styles.ts @@ -0,0 +1,32 @@ +import styled from "@emotion/styled"; +import { Card } from "@mui/material"; + +export const StyledCard = styled(Card)` + &.MuiPaper-root { + align-items: stretch; + display: flex; + } + + .MuiButtonBase-root { + align-content: flex-start; + display: grid; + gap: 
16px; + grid-template-columns: 1fr; + padding: 16px; + + .MuiSvgIcon-root { + box-sizing: content-box; + } + + img { + margin: 0; + } + } +` as typeof Card; + +export const CardContent = styled.div` + display: flex; + flex-direction: column; + gap: 4px; + grid-column: 1 / span all; +`; diff --git a/components/common/Card/components/CTACard/ctaCard.tsx b/components/common/Card/components/CTACard/ctaCard.tsx new file mode 100644 index 000000000..bf1cfc2be --- /dev/null +++ b/components/common/Card/components/CTACard/ctaCard.tsx @@ -0,0 +1,33 @@ +import { CardTitle } from "@databiosphere/findable-ui/lib/components/common/Card/components/CardTitle/cardTitle"; +import { BaseComponentProps } from "@databiosphere/findable-ui/lib/components/types"; +import { CardActionArea } from "@databiosphere/findable-ui/lib/components/common/Card/components/CardActionArea/cardActionArea"; +import { CardProps } from "@databiosphere/findable-ui/lib/components/common/Card/card"; +import { ForwardArrowIcon } from "@databiosphere/findable-ui/lib/components/common/CustomIcon/components/ForwardArrowIcon/forwardArrowIcon"; +import { CardContent, StyledCard } from "./ctaCard.styles"; +import { Typography } from "@mui/material"; +import { Props } from "./types"; +import { CARD_PROPS, SVG_ICON_PROPS, TYPOGRAPHY_PROPS } from "./constants"; + +export const CTACard = ({ + cardUrl, + className, + EndIcon = ForwardArrowIcon, + secondaryText, + StartIcon, + title, +}: BaseComponentProps & + Pick & + Props): JSX.Element => { + return ( + + + {StartIcon && } + + + {title} + {secondaryText} + + + + ); +}; diff --git a/components/common/Card/components/CTACard/types.ts b/components/common/Card/components/CTACard/types.ts new file mode 100644 index 000000000..a379aca3a --- /dev/null +++ b/components/common/Card/components/CTACard/types.ts @@ -0,0 +1,6 @@ +import { ElementType } from "react"; + +export interface Props { + EndIcon?: ElementType; + StartIcon?: ElementType; +} diff --git 
a/components/common/CustomIcon/components/CloudBinaryIcon/cloudBinaryIcon.tsx b/components/common/CustomIcon/components/CloudBinaryIcon/cloudBinaryIcon.tsx new file mode 100644 index 000000000..d735fa3dc --- /dev/null +++ b/components/common/CustomIcon/components/CloudBinaryIcon/cloudBinaryIcon.tsx @@ -0,0 +1,21 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const CloudBinaryIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/CustomIcon/components/DatabaseIcon/databaseIcon.tsx b/components/common/CustomIcon/components/DatabaseIcon/databaseIcon.tsx new file mode 100644 index 000000000..9fcc7aea1 --- /dev/null +++ b/components/common/CustomIcon/components/DatabaseIcon/databaseIcon.tsx @@ -0,0 +1,16 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const DatabaseIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + ); +}; diff --git a/components/common/CustomIcon/components/FlaskGearIcon/flaskGearIcon.tsx b/components/common/CustomIcon/components/FlaskGearIcon/flaskGearIcon.tsx new file mode 100644 index 000000000..703761175 --- /dev/null +++ b/components/common/CustomIcon/components/FlaskGearIcon/flaskGearIcon.tsx @@ -0,0 +1,20 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const FlaskGearIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/CustomIcon/components/LightBulbOnIcon/lightBulbOnIcon.tsx b/components/common/CustomIcon/components/LightBulbOnIcon/lightBulbOnIcon.tsx new file mode 100644 index 000000000..8584a830c --- /dev/null +++ b/components/common/CustomIcon/components/LightBulbOnIcon/lightBulbOnIcon.tsx @@ -0,0 +1,20 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const LightBulbOnIcon = 
({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/CustomIcon/components/PenFieldIcon/penFieldIcon.tsx b/components/common/CustomIcon/components/PenFieldIcon/penFieldIcon.tsx new file mode 100644 index 000000000..bc26253f2 --- /dev/null +++ b/components/common/CustomIcon/components/PenFieldIcon/penFieldIcon.tsx @@ -0,0 +1,20 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const PenFieldIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/CustomIcon/components/RocketLaunchIcon/rocketLaunchIcon.tsx b/components/common/CustomIcon/components/RocketLaunchIcon/rocketLaunchIcon.tsx new file mode 100644 index 000000000..92f04734e --- /dev/null +++ b/components/common/CustomIcon/components/RocketLaunchIcon/rocketLaunchIcon.tsx @@ -0,0 +1,20 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const RocketLaunchIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/CustomIcon/components/StartIcon/startIcon.tsx b/components/common/CustomIcon/components/StartIcon/startIcon.tsx new file mode 100644 index 000000000..15de3a016 --- /dev/null +++ b/components/common/CustomIcon/components/StartIcon/startIcon.tsx @@ -0,0 +1,17 @@ +import { SvgIcon, SvgIconProps } from "@mui/material"; + +export const StartIcon = ({ + fontSize = "large", + viewBox = "0 0 48 48", + ...props +}: SvgIconProps): JSX.Element => { + return ( + + + + + ); +}; diff --git a/components/common/Typography/components/Heading/heading.tsx b/components/common/Typography/components/Heading/heading.tsx index 2b09cc2c6..947885c6c 100644 --- a/components/common/Typography/components/Heading/heading.tsx +++ b/components/common/Typography/components/Heading/heading.tsx 
@@ -1,25 +1,34 @@ import { AnchorLink } from "@databiosphere/findable-ui/lib/components/common/AnchorLink/anchorLink"; import { TEXT_HEADING_LARGE } from "@databiosphere/findable-ui/lib/theme/common/typography"; -import { Typography } from "@mui/material"; +import { Typography, TypographyProps } from "@mui/material"; import { slugifyHeading } from "../../../../../plugins/common/utils"; export interface HeadingProps { + component?: "h1" | "h2" | "h3" | "h4" | "h5" | "h6"; enableAnchor?: boolean; headingValue: string; + headingValueSlug?: string; + sx?: TypographyProps["sx"]; + variant?: TypographyProps["variant"]; } export const Heading = ({ + component = "h1", enableAnchor = true, headingValue, + headingValueSlug = slugifyHeading(headingValue), + sx = { mb: 2 }, + variant = TEXT_HEADING_LARGE, }: HeadingProps): JSX.Element => { return ( {headingValue} - {enableAnchor && } + {enableAnchor && } ); }; diff --git a/components/index.tsx b/components/index.tsx index d4e6dca9c..80569633e 100644 --- a/components/index.tsx +++ b/components/index.tsx @@ -41,11 +41,13 @@ export { Resources } from "./Consortia/CSER/components/Resources/resources"; export { Card as EventCard } from "./Events/components/Card/card"; export { EventsHero } from "./Events/components/EventsHero/eventsHero"; export { Events } from "./Events/events"; -export { Card as WorkspaceCard } from "./Home/components/Section/components/SectionWorkspaces/components/Workspaces/components/Card/card"; export { ANVILBranding } from "./Layout/components/Footer/components/Branding/components/ANVILBranding/anvilBranding"; +export { Actions } from "./Layout/components/Header/components/Actions/actions"; export { LabelIconMenuItem } from "./Layout/components/Header/components/Content/components/Navigation/components/NavigationMenuItems/components/LabelIconMenuItem/labelIconMenuItem"; +export { SectionOverview } from "./Layout/components/Section/components/SectionContent/components/SectionOverview/sectionOverview"; export 
{ NewsHero } from "./News/components/NewsHero/newsHero"; export { News } from "./News/news"; export { Grid as AnalysisPortals } from "./Overview/components/AnalysisPortals/analysisPortals.styles"; export { Button as ButtonAddPublication } from "./Overview/components/Publications/components/Button/button"; export { Publications as AnVILPublications } from "./Overview/components/Publications/publications"; +export { Workspaces } from "./Learn/components/Workspaces/workspaces"; diff --git a/content/entities.ts b/content/entities.ts index 8b0e3db11..01c7a0934 100644 --- a/content/entities.ts +++ b/content/entities.ts @@ -1,3 +1,11 @@ +import { Breadcrumb } from "@databiosphere/findable-ui/lib/components/common/Breadcrumbs/breadcrumbs"; +import { LayoutStyle } from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/common/entities"; +import { NavItem } from "@databiosphere/findable-ui/lib/components/Layout/components/Nav/nav"; +import { OutlineItem } from "@databiosphere/findable-ui/lib/components/Layout/components/Outline/outline"; +import { MDXRemoteSerializeResult } from "next-mdx-remote"; +import { Overview } from "../components/Layout/components/Section/components/SectionContent/components/SectionOverview/types"; +import { NodeHero } from "../docs/common/entities"; + export interface EventSession { sessionEnd?: string; sessionStart: string; @@ -6,12 +14,19 @@ export interface EventSession { export type Frontmatter = | DefaultFrontmatter | FrontmatterEvent + | FrontmatterOverview | FrontmatterNews; export interface DefaultFrontmatter { + breadcrumbs?: Breadcrumb[]; description: string; + enableContentEnd?: boolean; + enableNavigation?: boolean; + enableOutline?: boolean; + enableSupportForum?: boolean; hidden?: boolean; layoutStyle?: FrontmatterLayoutStyle; + subTitle?: string; title: string; } @@ -35,6 +50,11 @@ export interface FrontmatterNews extends DefaultFrontmatter { url: string | null; } +export interface FrontmatterOverview extends 
DefaultFrontmatter { + outline?: OutlineItem[]; + overview: Overview[]; +} + export type FrontmatterLayoutStyle = | "LAYOUT_STYLE_CONTRAST_LIGHT" | "LAYOUT_STYLE_CONTRAST_LIGHTEST" @@ -43,3 +63,14 @@ export type FrontmatterLayoutStyle = | "LAYOUT_STYLE_NO_CONTRAST_LIGHTEST"; export type Hashtag = `#${string}`; + +export interface StaticProps { + frontmatter: Frontmatter; + hero?: NodeHero | null; + layoutStyle: LayoutStyle | null; + mdxSource: MDXRemoteSerializeResult; + navigation?: NavItem[] | null; + outline?: OutlineItem[] | null; + pageTitle: string | null; + slug: string[]; +} diff --git a/content/typeGuards.ts b/content/typeGuards.ts new file mode 100644 index 000000000..a6de613a2 --- /dev/null +++ b/content/typeGuards.ts @@ -0,0 +1,12 @@ +import { Frontmatter, FrontmatterOverview } from "./entities"; + +/** + * Type guard for "overview" related frontmatter. + * @param frontmatter - Frontmatter. + * @returns true if the frontmatter includes the "overview" property. + */ +export function isFrontmatterOverview( + frontmatter: Frontmatter +): frontmatter is FrontmatterOverview { + return "overview" in frontmatter; +} diff --git a/content/utils.ts b/content/utils.ts index bc87cab51..750e0b42c 100644 --- a/content/utils.ts +++ b/content/utils.ts @@ -4,6 +4,8 @@ import moment, { Moment, tz } from "moment-timezone"; import "moment-timezone/index"; import { default as path, default as pathTool } from "path"; import { SlugByFilePaths } from "../docs/common/entities"; +import { resolveDocPath } from "../docs/common/generateStaticPaths"; +import { mapSlugByFilePaths } from "../docs/common/utils"; import { EventSession, Frontmatter, @@ -44,6 +46,18 @@ export function convertDateToMoment( return tz(date, ["D MMM YYYY h:mm A", "D MMM YYYY"], timezone); } +/** + * Returns a tuple array of paths and frontmatter for the given section. + * @param section - Section. + * @returns tuple array of paths and frontmatter. 
+ */ +export function generateSectionPathWithFrontmatter( + section: string +): [string, Frontmatter][] { + const slugByFilePaths = mapSlugByFilePaths(resolveDocPath(section)); + return [...getFrontmatterByPaths(slugByFilePaths)]; +} + /** * Returns the path to the given content directory e.g. "...docs/events" or "...docs/news". * @param dirName - Directory name. diff --git a/docs/anvil-champions.mdx b/docs/anvil-champions.mdx index 96e98165f..aeb7319a5 100644 --- a/docs/anvil-champions.mdx +++ b/docs/anvil-champions.mdx @@ -1,5 +1,6 @@ --- description: "AnVIL Champions serve as a local resource within their community (institution, consortium, research group, geographic region, etc.) who can help members of their community that are interested in using AnVIL." +enableOutline: false title: "AnVIL Champions" --- diff --git a/docs/common/constants.ts b/docs/common/constants.ts index a026f7406..6d2ba673f 100644 --- a/docs/common/constants.ts +++ b/docs/common/constants.ts @@ -4,9 +4,7 @@ import { Link } from "../../components/common/Link/link"; import { Table } from "../../components/common/Table/table"; import DataIngestionChart from "../../components/Consortia/CSER/components/DataIngestionChart/dataIngestionChart"; import { PUBLICATION_CATEGORY } from "../../components/Home/components/Section/components/SectionPublications/common/entities"; -import { CARDS as WORKSPACE_CARDS } from "../../components/Home/components/Section/components/SectionWorkspaces/common/content"; import { ROUTES } from "../../routes/constants"; -import { getContentScope } from "./scope"; export const DOC_SITE_FOLDER_NAME = "docs"; @@ -41,18 +39,17 @@ export const MDX_COMPONENTS = { Publications: C.Publications, ResearchMaterials: C.ResearchMaterials, Resources: C.Resources, + SectionOverview: C.SectionOverview, Subheader: C.Subheader, TextBodyLarge500: C.TextBodyLarge500, Video: C.Video, - WorkspaceCard: C.WorkspaceCard, + Workspaces: C.Workspaces, a: Link, table: Table, }; export const MDX_SCOPE 
= { - ...getContentScope(), ANCHOR_TARGET, PUBLICATION_CATEGORY, ROUTES, - workspaces: WORKSPACE_CARDS, }; diff --git a/docs/common/entities.ts b/docs/common/entities.ts index 691ad1a48..0448e0411 100644 --- a/docs/common/entities.ts +++ b/docs/common/entities.ts @@ -27,7 +27,6 @@ export enum NavigationKey { export interface NavigationNode extends Pick { - enableOutline?: boolean; hero?: NodeHero; key?: string; label?: string; diff --git a/docs/common/generateStaticPaths.ts b/docs/common/generateStaticPaths.ts new file mode 100644 index 000000000..cb153e25e --- /dev/null +++ b/docs/common/generateStaticPaths.ts @@ -0,0 +1,30 @@ +import { GetStaticPathsResult } from "next/types"; +import pathTool from "path"; +import { DOC_SITE_FOLDER_NAME } from "./constants"; +import { mapSlugByFilePaths } from "./utils"; + +/** + * Generates static paths for the documentation site, for the specified relative path. + * @param relativePath - Relative path. + * @returns static paths. + */ +export function generateStaticPaths( + relativePath = "" +): GetStaticPathsResult["paths"] { + const docPath = resolveDocPath(relativePath); + const slugByFilePaths = mapSlugByFilePaths(docPath); + return [...slugByFilePaths].map(([, slug]) => { + return { + params: { slug }, + }; + }); +} + +/** + * Resolves the absolute path to a specific subdirectory within the documentation folder. + * @param relativePath - Relative path. + * @returns The absolute path to the specified subdirectory within the documentation folder. 
+ */ +export function resolveDocPath(relativePath: string): string { + return pathTool.join(process.cwd(), DOC_SITE_FOLDER_NAME, relativePath); +} diff --git a/docs/common/generateStaticProps.ts b/docs/common/generateStaticProps.ts new file mode 100644 index 000000000..a0cea1d31 --- /dev/null +++ b/docs/common/generateStaticProps.ts @@ -0,0 +1,64 @@ +import { LAYOUT_STYLE_NO_CONTRAST_DEFAULT } from "@databiosphere/findable-ui/lib/components/Layout/components/ContentLayout/common/constants"; +import { OutlineItem } from "@databiosphere/findable-ui/lib/components/Layout/components/Outline/outline"; +import { GetStaticPropsContext } from "next"; +import { SerializeOptions } from "next-mdx-remote/dist/types"; +import { serialize } from "next-mdx-remote/serialize"; +import { GetStaticPropsResult } from "next/types"; +import remarkGfm from "remark-gfm"; +import { Frontmatter, StaticProps } from "../../content/entities"; +import { rehypeSlug } from "../../plugins/rehypeSlug"; +import { remarkHeadings } from "../../plugins/remarkHeadings"; +import { + buildPageSlug, + extractMDXFrontmatter, + getNavigationConfig, + getStaticPropLayoutStyle, + getStaticPropNavigation, + getStaticPropOutline, + parseFrontmatter, +} from "./utils"; + +export async function generateStaticProps( + props: GetStaticPropsContext, + section = "", + frontmatterFn = ( + frontmatter: Frontmatter | undefined + ): Frontmatter | undefined => frontmatter, + serializeOptions: SerializeOptions = {} +): Promise | undefined> { + const slug = buildPageSlug(props, section); + if (!slug) return; + // Extract frontmatter and content from the MDX file. + const { content, data } = extractMDXFrontmatter(slug); + const frontmatter = frontmatterFn(parseFrontmatter(data)); + if (!frontmatter || frontmatter.hidden) return; + // Serialize the MDX content. 
+ const outline: OutlineItem[] = []; + const mdxSource = await serialize(content, { + ...serializeOptions, + mdxOptions: { + rehypePlugins: [rehypeSlug], + remarkPlugins: [[remarkHeadings, outline], remarkGfm], + }, + scope: { ...serializeOptions.scope, frontmatter }, + }); + // Get the navigation configuration. + const { + hero = null, + layoutStyle = LAYOUT_STYLE_NO_CONTRAST_DEFAULT, + navigation = null, + } = getNavigationConfig(slug) || {}; + const { title: pageTitle = null } = frontmatter; + return { + props: { + frontmatter, + hero, + layoutStyle: getStaticPropLayoutStyle(layoutStyle, frontmatter), + mdxSource, + navigation: getStaticPropNavigation(navigation, frontmatter), + outline: getStaticPropOutline(outline, frontmatter), + pageTitle, + slug, + }, + }; +} diff --git a/docs/common/scope.ts b/docs/common/scope.ts deleted file mode 100644 index 21e381d23..000000000 --- a/docs/common/scope.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { config } from "../../config/config"; -import { SiteConfig } from "../../site-config/common/entities"; - -interface ContentScope { - portalURL: string; -} - -/** - * Returns the content scope. - * @returns content scope. 
- */ -export function getContentScope(): ContentScope { - const siteConfig = config() as SiteConfig; - const portalURL = siteConfig.portalURL; - return { portalURL }; -} diff --git a/docs/common/utils.ts b/docs/common/utils.ts index c9edc5fe2..505925c0b 100644 --- a/docs/common/utils.ts +++ b/docs/common/utils.ts @@ -10,12 +10,49 @@ import { NavItem } from "@databiosphere/findable-ui/lib/components/Layout/compon import { OutlineItem } from "@databiosphere/findable-ui/lib/components/Layout/components/Outline/outline"; import fs from "fs"; import matter from "gray-matter"; +import { GetStaticPropsContext } from "next"; import { GetStaticPathsResult } from "next/types"; import pathTool, * as path from "path"; import { Frontmatter } from "../../content/entities"; import { navigation as navigationConfig } from "../../site-config/anvil-portal/dev/navigation/navigation"; import { DOC_SITE_FOLDER_NAME } from "./constants"; -import { NavigationKey, NavigationNode, SlugByFilePaths } from "./entities"; +import { + Navigation, + NavigationKey, + NavigationNode, + SlugByFilePaths, +} from "./entities"; + +/** + * Returns the page slug for the given static props context and section. + * @param props - Static props context. + * @param section - Document section e.g. "learn". + * @returns page slug. + */ +export function buildPageSlug( + props: GetStaticPropsContext, + section?: string +): string[] | undefined { + const slug = props.params?.slug; + if (!slug || typeof slug === "string") return; + if (section) return [section, ...slug]; + return slug; +} + +/** + * Returns MDX content and frontmatter from the given slug. + * @param slug - Slug. + * @returns MDX content and frontmatter. + */ +export function extractMDXFrontmatter( + slug: string[] +): matter.GrayMatterFile { + const markdownWithMeta = fs.readFileSync( + getMDXFilePath(slug, getDocsDirectory()), + "utf-8" + ); + return matter(markdownWithMeta); +} /** * Filters out headings (H1, and H3-H6) from the outline. 
@@ -27,11 +64,17 @@ export function filterOutline(outline: OutlineItem): boolean { } /** - * Returns the path to the "docs" directory. - * @returns path to the "docs" directory. + * Returns the static paths for each mdx content in the "docs" directory. + * @returns the static paths for the mdx content. */ -export function getDocsDirectory(): string { - return pathTool.join(process.cwd(), DOC_SITE_FOLDER_NAME); +export function generatePaths(): GetStaticPathsResult["paths"] { + const docsDirectory = getDocsDirectory(); + const slugByFilePaths = mapSlugByFilePaths(docsDirectory); + return [...slugByFilePaths].map(([, slug]) => { + return { + params: { slug }, + }; + }); } /** @@ -59,6 +102,14 @@ function getActiveURL(pagePath: string, navigation?: NavItem[]): string { return activeURLs.find((url) => pagePath === url) || activeURLs.pop() || ""; } +/** + * Returns the path to the "docs" directory. + * @returns path to the "docs" directory. + */ +export function getDocsDirectory(): string { + return pathTool.join(process.cwd(), DOC_SITE_FOLDER_NAME); +} + /** * Returns MDX file path for the given slug. * @param slug - Slug. @@ -72,24 +123,6 @@ function getMDXFilePath( return pathTool.join(docsDirectory, ...slug) + ".mdx"; } -/** - * Returns the navigation items with the active flag set to true if the URL matches the active URL. - * @param activeURL - Active URL. - * @param navigation - Navigation. - * @returns the navigation items with the active flag set to true if the URL matches the active URL. - */ -function getNavItems( - activeURL: string, - navigation?: NavItem[] -): NavItem[] | undefined { - return navigation?.map((navItem) => { - if (activeURL === navItem.url) { - return { ...navItem, active: true }; - } - return navItem; - }); -} - /** * Returns the navigation config for the given slug. * @param slug - Slug. @@ -106,13 +139,7 @@ export function getNavigationConfig( // Loop through the slug and find the node where slug matches the node's slug. 
for (let i = 0; i < slug.length; i++) { const key = slug[i]; - for (const { - enableOutline = true, - hero, - layoutStyle, - navigation, - slugs, - } of sectionMap.nodes) { + for (const { hero, layoutStyle, navigation, slugs } of sectionMap.nodes) { if (slugs.includes(key)) { if (slug.length !== 1 && i === 0) { // Although the first slug's key is a match, continue if the slug has more than one element. @@ -120,26 +147,114 @@ export function getNavigationConfig( } // Return the layout styles; navigation (and therefore hero) are undefined. if (!navigation) { - return { enableOutline, layoutStyle }; + return { layoutStyle }; } const pagePath = `/${slug.join("/")}`; const activeURL = getActiveURL(pagePath, navigation); if (activeURL) { const navItems = getNavItems(activeURL, navigation); - return { enableOutline, hero, layoutStyle, navigation: navItems }; + return { hero, layoutStyle, navigation: navItems }; } } } } } +/** + * Returns the navigation items with the active flag set to true if the URL matches the active URL. + * @param activeURL - Active URL. + * @param navigation - Navigation. + * @returns the navigation items with the active flag set to true if the URL matches the active URL. + */ +function getNavItems( + activeURL: string, + navigation?: NavItem[] +): NavItem[] | undefined { + return navigation?.map((navItem) => { + if (activeURL === navItem.url) { + return { ...navItem, active: true }; + } + return navItem; + }); +} + +/** + * Returns the static prop "layoutStyle", if specified from the frontmatter; otherwise defaults to configured layoutStyle. + * @param layoutStyle - Layout style. + * @param frontmatter - Frontmatter. + * @returns layout style. 
+ */ +export function getStaticPropLayoutStyle( + layoutStyle: LayoutStyle | null, + frontmatter: Frontmatter | undefined +): LayoutStyle | null { + switch (frontmatter?.layoutStyle) { + case "LAYOUT_STYLE_CONTRAST_LIGHT": + return LAYOUT_STYLE_CONTRAST_LIGHT; + case "LAYOUT_STYLE_CONTRAST_LIGHTEST": + return LAYOUT_STYLE_CONTRAST_LIGHTEST; + case "LAYOUT_STYLE_NO_CONTRAST_DEFAULT": + return LAYOUT_STYLE_NO_CONTRAST_DEFAULT; + case "LAYOUT_STYLE_NO_CONTRAST_LIGHT": + return LAYOUT_STYLE_NO_CONTRAST_LIGHT; + case "LAYOUT_STYLE_NO_CONTRAST_LIGHTEST": + return LAYOUT_STYLE_NO_CONTRAST_LIGHTEST; + default: + return layoutStyle; + } +} + +/** + * Returns the static prop "navigation" from navigation configuration. + * If the frontmatter enableNavigation is false, the returned value is null. + * @param navigation - Navigation configuration. + * @param frontmatter - Frontmatter. + * @returns navigation. + */ +export function getStaticPropNavigation( + navigation: Navigation[] | null, + frontmatter: Frontmatter +): Navigation[] | null { + if (frontmatter.enableNavigation) return navigation; + return null; +} + +/** + * Returns the static prop "outline" from plugin-generated outline items, or the frontmatter outline, where the + * frontmatter outline takes precedence over plugin-generated outline. + * If the frontmatter enableOutline is false, the outline is returned as an empty array. + * @param outline - Plugin-generated outline items. + * @param frontmatter - Frontmatter. + * @returns outline. + */ +export function getStaticPropOutline( + outline: OutlineItem[], + frontmatter: Frontmatter +): OutlineItem[] { + if (frontmatter.enableOutline) { + if ("outline" in frontmatter) return frontmatter.outline ?? []; + if (outline.length > 0) return outline.filter(filterOutline); + } + return []; +} + +/** + * Frontmatter type guard. + * @param data - Grey matter file data. + * @returns true if the data is frontmatter. 
+ */ +function isFrontmatter(data: unknown): data is Frontmatter { + if (!data) return false; + return typeof data === "object" && "title" in data; +} + /** * Returns true if the file is an MDX file. * @param fileName - File name. * @returns true if the file is an MDX file. */ function isMdxFile(fileName: string): boolean { - return fileName.endsWith(".mdx") || fileName.endsWith(".md"); // TODO(cc) update to use .mdx only once md is removed. + return fileName.endsWith(".mdx"); } /** @@ -182,59 +297,26 @@ export function mapSlugByFilePaths( } /** - * Returns the static paths for each mdx content in the "docs" directory. - * @returns the static paths for the mdx content. + * Returns the frontmatter from the given grey matter file data. + * @param data - Grey matter file data. + * @returns frontmatter. */ -export function generatePaths(): GetStaticPathsResult["paths"] { - const docsDirectory = getDocsDirectory(); - const slugByFilePaths = mapSlugByFilePaths(docsDirectory); - return [...slugByFilePaths].map(([, slug]) => { +export function parseFrontmatter( + data: matter.GrayMatterFile["data"] +): Frontmatter | undefined { + if (isFrontmatter(data)) { + const { + enableContentEnd = true, + enableNavigation = true, // Remove default when "learn" UI is updated. + enableOutline = true, + enableSupportForum = false, + } = data; return { - params: { slug }, + enableContentEnd, + enableNavigation, + enableOutline, + enableSupportForum, + ...data, }; - }); -} - -/** - * Returns the content layout style, specified by the navigation config or the frontmatter. - * @param navigationLayoutStyle - Layout style, specified by the navigation config. - * @param frontmatterLayoutStyle - Layout style, specified by the frontmatter. - * @returns layout style. 
- */ -export function getContentLayoutStyle( - navigationLayoutStyle: LayoutStyle | undefined, - frontmatterLayoutStyle: Frontmatter["layoutStyle"] -): LayoutStyle | null { - if (frontmatterLayoutStyle) { - switch (frontmatterLayoutStyle) { - case "LAYOUT_STYLE_CONTRAST_LIGHT": - return LAYOUT_STYLE_CONTRAST_LIGHT; - case "LAYOUT_STYLE_CONTRAST_LIGHTEST": - return LAYOUT_STYLE_CONTRAST_LIGHTEST; - case "LAYOUT_STYLE_NO_CONTRAST_DEFAULT": - return LAYOUT_STYLE_NO_CONTRAST_DEFAULT; - case "LAYOUT_STYLE_NO_CONTRAST_LIGHT": - return LAYOUT_STYLE_NO_CONTRAST_LIGHT; - case "LAYOUT_STYLE_NO_CONTRAST_LIGHTEST": - return LAYOUT_STYLE_NO_CONTRAST_LIGHTEST; - default: - return null; - } } - return navigationLayoutStyle || null; -} - -/** - * Returns MDX content and frontmatter from the given slug. - * @param slug - Slug. - * @returns MDX content and frontmatter. - */ -export function parseMDXFrontmatter( - slug: string[] -): matter.GrayMatterFile { - const markdownWithMeta = fs.readFileSync( - getMDXFilePath(slug, getDocsDirectory()), - "utf-8" - ); - return matter(markdownWithMeta); } diff --git a/docs/consortia/cser.mdx b/docs/consortia/cser.mdx index 961ed5290..c1e360981 100644 --- a/docs/consortia/cser.mdx +++ b/docs/consortia/cser.mdx @@ -20,7 +20,7 @@ Prioritizing engagement of traditionally underrepresented populations in genomic ## CSER Workspaces -Please see the for the list of CSER Workspaces on AnVIL. +Please see the [AnVIL Dataset Catalog]({portalURL}/data/consortia/CSER/workspaces) for the list of CSER Workspaces on AnVIL. ## CSER diversity & inclusion statement diff --git a/docs/events/bioconductor-popup-workshops-20210426.mdx b/docs/events/bioconductor-popup-workshops-20210426.mdx index 277aa0e4b..b2498bc2b 100644 --- a/docs/events/bioconductor-popup-workshops-20210426.mdx +++ b/docs/events/bioconductor-popup-workshops-20210426.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. 
All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [Using R / Bioconductor in AnVIL](/learn/data-analysts/using-r-bioconductor-in-anvil). +For a tutorial based on the workshop with an accompanying tutorial see [Using R / Bioconductor in AnVIL](/learn/run-interactive-analyses/using-r-bioconductor-in-anvil). ## To Register diff --git a/docs/events/bioconductor-popup-workshops-20210503.mdx b/docs/events/bioconductor-popup-workshops-20210503.mdx index eeb1ea563..e904cfcb9 100644 --- a/docs/events/bioconductor-popup-workshops-20210503.mdx +++ b/docs/events/bioconductor-popup-workshops-20210503.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [The R / Bioconductor AnVIL Package for Easy Access to Buckets, Data, and Workflows, and Fast Package Installation](/learn/data-analysts/the-r-bioconductor-anvil-package). +For a tutorial based on the workshop with an accompanying tutorial see [The R / Bioconductor AnVIL Package for Easy Access to Buckets, Data, and Workflows, and Fast Package Installation](/learn/run-interactive-analyses/the-r-bioconductor-anvil-package). ## To Register diff --git a/docs/events/bioconductor-popup-workshops-20210510.mdx b/docs/events/bioconductor-popup-workshops-20210510.mdx index 2cc48d7c8..7665aa952 100644 --- a/docs/events/bioconductor-popup-workshops-20210510.mdx +++ b/docs/events/bioconductor-popup-workshops-20210510.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [Running a Workflow: Bulk RNASeq Differential Expression from FASTQ Files to Top Table](/learn/data-analysts/running-a-workflow). 
+For a tutorial based on the workshop with an accompanying tutorial see [Running a Workflow: Bulk RNASeq Differential Expression from FASTQ Files to Top Table](/learn/run-interactive-analyses/running-a-workflow). ## To Register diff --git a/docs/events/bioconductor-popup-workshops-20210517.mdx b/docs/events/bioconductor-popup-workshops-20210517.mdx index e9f64465d..56872569b 100644 --- a/docs/events/bioconductor-popup-workshops-20210517.mdx +++ b/docs/events/bioconductor-popup-workshops-20210517.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [Single-cell RNASeq with 'Orchestrating Single Cell Analysis' in R / Bioconductor](/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor). +For a tutorial based on the workshop with an accompanying tutorial see [Single-cell RNASeq with 'Orchestrating Single Cell Analysis' in R / Bioconductor](/learn/run-interactive-analyses/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor). ## To Register diff --git a/docs/events/bioconductor-popup-workshops-20210524.mdx b/docs/events/bioconductor-popup-workshops-20210524.mdx index 65edc3c09..f67d67df9 100644 --- a/docs/events/bioconductor-popup-workshops-20210524.mdx +++ b/docs/events/bioconductor-popup-workshops-20210524.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [Using AnVIL for teaching R / Bioconductor](/learn/data-analysts/using-anvil-for-teaching-r-bioconductor). +For a tutorial based on the workshop with an accompanying tutorial see [Using AnVIL for teaching R / Bioconductor](/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor). 
## To Register diff --git a/docs/events/bioconductor-popup-workshops-20210531.mdx b/docs/events/bioconductor-popup-workshops-20210531.mdx index 622fe53cd..763a9e3f8 100644 --- a/docs/events/bioconductor-popup-workshops-20210531.mdx +++ b/docs/events/bioconductor-popup-workshops-20210531.mdx @@ -21,7 +21,7 @@ The main goal is to engage and enable the use of AnVIL / Terra by R users. All s ## Tutorials and Videos -For a tutorial based on the workshop with an accompanying tutorial see [Reproducible research with AnVILPublish](/learn/data-analysts/reproducible-research-with-anvilpublish). +For a tutorial based on the workshop with an accompanying tutorial see [Reproducible research with AnVILPublish](/learn/run-interactive-analyses/reproducible-research-with-anvilpublish). ## To Register diff --git a/docs/faq/data-submission.mdx b/docs/faq/data-submission.mdx index 106da6f7e..cd3d91ea1 100644 --- a/docs/faq/data-submission.mdx +++ b/docs/faq/data-submission.mdx @@ -18,9 +18,9 @@ AnVIL aims to host a variety of datasets useful to the genomics community. Submi ## Who pays for storage costs in AnVIL? AnVIL will cover storage costs of data from NHGRI funded studies that has been or will be released publicly to the research community (including those registered in dbGaP or DUOS and released through controlled access). Otherwise, storage costs are incurred by the billing account associated with the workspace. -For more on understanding and controlling cloud costs in AnVIL, see [Understanding Cloud Costs](/learn/introduction/understanding-cloud-costs). +For more on understanding and controlling cloud costs in AnVIL, see [Understanding Cloud Costs](/learn/control-cloud-costs/understanding-cloud-costs). -For more on preparing a budget justification for cloud costs in AnVIL, see [Budget Templates](/learn/investigators/budget-templates). +For more on preparing a budget justification for cloud costs in AnVIL, see [Budget Templates](/learn/control-cloud-costs/budget-templates). 
## Does NHGRI plan to move data from dbGaP to AnVIL? diff --git a/docs/guides/content/events-guide.mdx b/docs/guides/content/events-guide.mdx index 315dc32fd..6388b1b6f 100644 --- a/docs/guides/content/events-guide.mdx +++ b/docs/guides/content/events-guide.mdx @@ -95,4 +95,4 @@ Similar to: ## Updating Events with Event Content -Once the event has happened please gather any slides, minutes / notes, video recordings., hashtags for posting the [events](/events) page and [workshop videos](/learn/workshop-videos) listing. These can be delivered as a pull request to the event's page or as a Google doc. +Once the event has happened please gather any slides, minutes / notes, video recordings, hashtags for posting the [events](/events) page and workshop videos listing. These can be delivered as a pull request to the event's page or as a Google doc. diff --git a/docs/learn.mdx b/docs/learn.mdx index 1b1b61a3d..a04c33f34 100644 --- a/docs/learn.mdx +++ b/docs/learn.mdx @@ -1,312 +1,5 @@ --- -description: "A guided walk-through of the AnVIL / Terra documentation with a focus on onboarding and preparing new users to run genomic analyses in the cloud." -title: Getting Started with AnVIL" +description: "A guided walk-through of the AnVIL documentation with a focus on onboarding and preparing new users to run genomic analyses in the cloud." +subTitle: "The AnVIL platform is an NHGRI-supported data commons running on the Google Cloud Platform (GCP)." +title: "Learn About AnVIL" --- - -# Getting Started with AnVIL - -The AnVIL platform is -an [NHGRI](https://www.genome.gov/Funded-Programs-Projects/Computational-Genomics-and-Data-Science-Program/Genomic-Analysis-Visualization-Informatics-Lab-space-AnVIL) --supported data commons running on the Google Cloud Platform (GCP). 
AnVIL enables researchers to analyze high-value open -and controlled access genomic [datasets]({portalURL}/data/consortia) with popular analysis tools in -a [secure](/overview/security) cloud computing environment. - -AnVIL uses [Terra](https://anvil.terra.bio/#workspaces) as its analysis platform, [AnVIL Data Explorer]({browserURL}) for -data search and artificial cohort creation, and [Dockstore](https://dockstore.org/) as a repository for Docker-based -genomic analysis tools and workflows. - -In addition to Docker-based analysis workflows, AnVIL supports popular interactive analysis tools such as Jupyter -notebooks, Bioconductor, RStudio, and [Galaxy](https://galaxyproject.org/). - -By operating in the cloud, AnVIL users can scale analyses from a single computer to thousands and securely share data, -workflows, and reproducible results with collaborators and colleagues. - - - To get started, see the [Getting Started on - AnVIL](https://jhudatascience.org/AnVIL_Book_Getting_Started/index.html) book. - - -## About AnVIL’s Documentation - -AnVIL’s training materials curate and augment existing component and tool documentation and show how to use AnVIL’s -parts together to accomplish the goals of AnVIL’s different user personas. - -To complement this onboarding and introductory section, the AnVIL team is in the process of developing persona-specific -guides and tutorials. For example, see the guides for data -analysts, [investigators](/learn/investigators/setting-up-lab-accounts), developers, instructors, -and [data contributors](/learn/data-submitters/submission-guide/data-submitters-overview). - -## New User Onboarding - -The following is a guided walk-through of the AnVIL / Terra documentation with a focus on onboarding and preparing new -users to run genomic analyses in the cloud. - -This section covers: - -1. Setting up and linking user accounts. -1. Obtaining access to AnVIL data. -1. An overview of Terra workspaces. -1. 
An overview of cloud compute costs and setting up billing. - -### Setting Up and Linking User Accounts - -All you need is a Google account to register with Terra and browse AnVIL’s publicly accessible workspaces. - -Likewise, with a Google account, you can register with the AnVIL Data Explorer and browse publicly accessible datasets or register with -Dockstore and browse tools and workflows. - -To send artificial cohorts to Terra for Analysis, you must use the same Google ID for your Data Explorer and Terra accounts. - -To allow your dbGaP data request approvals to flow through to Terra and the Data Explorer, you will need to link your eRA Commons ID -with both platforms. - - - For instructions on setting up accounts in Google, Terra, the AnVIL Data Explorer, and linking - them together, see [How to register in Terra on GCP (Google SSO)](https://support.terra.bio/hc/en-us/articles/360028235911-How-to-register-on-Terra-Google-SSO). - - -### Obtaining Access to AnVIL Data - -AnVIL holds genomic data for hundreds of thousands of study participants. Much of this data is controlled access. - -To obtain access to controlled-access data sets, you must either be a member of a data-generating consortium with a -data-sharing agreement among consortium members or have been granted access to a study through the dbGaP Data Access -Request process. - -Once you have been granted access, and assuming you have linked your eRA commons ID with Terra, you will be -able to see your new studies in the Data Explorer and new data-oriented workspaces in Terra. - -AnVIL’s open-access datasets, such -as [1000 Genomes High Coverage 2019](https://anvil.terra.bio/#workspaces/anvil-datastorage/1000G-high-coverage-2019) can -be accessed in Terra or the Data Explorer immediately after account creation. - -For a detailed listing of available datasets searchable by disease, data type, consent type, and consortia, see -AnVIL’s [Dataset Catalog]({portalURL}/data/consortia). 
- - - For instructions on requesting data access, see [Requesting Data - Access](/learn/accessing-data/requesting-data-access) and [Discovering - Data](/learn/accessing-data/discovering-data). - - -## Analyzing Data in Terra Workspaces - -In Terra, you use workspaces to configure and run analyses and share results. Terra workspaces typically hold genomic -data and subject-level phenotypic and sample processing data and are configured with analysis tools such as notebooks -and Docker images. Workspaces can also save the output generated by running an analysis with a workspace’s associated -“cloud environment.” - -Terra workspaces support interactive analysis with Jupyter Notebooks, Bioconductor, and Galaxy. Terra workspaces can also -run Docker containerized workflows written in WDL. - -In general, to perform an analysis in a workspace, you set up the data and workflows you require and then launch a cloud -environment to execute the analysis over the data and write out results to the workspace storage bucket. - -You may start with a blank workspace, but typically, you will start by cloning a workspace containing the data or -the analysis you require. - -### Workspace Types - -There are several types of workspaces to consider when thinking about cloning a workspace to start your project. - -**Data-Oriented Workspaces** - These workspaces hold data for AnVIL open or controlled-access data sets or cohorts -exported from the Data Explorer. They may contain documentation in the dashboard about the study that generated the data set and data -tables holding sample and subject phenotypic metadata with links to the genomic data files. - -**Analysis-Oriented Workspaces** - Analysis-oriented workspaces showcase a specific analysis or tool such -as [Hail](https://anvil.terra.bio/#workspaces/help-gatk/Hail-Notebook-Tutorials) -or [Bioconductor](https://anvil.terra.bio/#workspaces/help-gatk/Bioconductor). 
- -**Example Workspaces** - The example workspaces, also referred to as “Featured” workspaces, are educational tutorial -workspaces demonstrating collections of best practices in analysis and reproducible science. For an example, -see [Reproducing the paper: Variant analysis of Tetralogy of Fallot](https://app.terra.bio/#workspaces/help-gatk/Reproducibility_Case_Study_Tetralogy_of_Fallot). - - - For more information on the different workspace types and how they can help - you get started, see [Start with curated sample - workspaces](https://support.terra.bio/hc/en-us/articles/360028967111-Start-with-curated-sample-workspaces-for-a-variety-of-use-cases) - or [Using Example - Workspaces](/learn/analysis-workflows/using-example-workspaces). - - -### Workspace Composition - -A Terra workspace consists of the following: - -1. A **Dashboard** - for holding markdown documentation about the workspace. -1. A **Cloud Storage Bucket** for holding data files, notebooks, and analysis output. Typically, this bucket is - configured as “requester pays,” meaning that users downloading from the bucket pay cloud egress fees. -1. **Data Tables** - for holding participant or sequencing metadata. For example, is it common to have a set of - “Participant” tables and a set of “Sample Tables”. Participant tables hold one row per participant with phenotypic - data, e.g., gender, age, relevant diseases, etc. Sample tables with one row per sample typically hold information - about the sample sequencing process and metadata. Sample tables also commonly link to the genomic data derived - from the sample. -1. **Reference Data** - for holding links to a reference genome or other reference data such as hg38. -1. **Workspace Data** - for holding additional key-value data pairs used for configuring the workspace. -1. **Cloud Environments** - for executing the workspace’s interactive analysis or workflows. 
Cloud environments may - consist of a single machine or cluster of machines and be configured with various amounts of RAM and persistent disk. - Cloud environments may be in a running or stopped state. Note that even in the stopped state, cloud environments may - continue to incur charges, for example, for persistent disk space allocated. -1. **A Terra Billing Project** - for specifying the Google Cloud Billing Account charged for GCP cloud compute costs - incurred by the workspace. When Terra Billing Projects are created, they are linked to a Google Cloud Billing - Account. When a workspace is created, it is linked with a Terra Billing Project and thereby, to a Google Cloud - Billing Account. -1. **Permissions** for controlling who can view, clone, update, or share a workspace and who can launch cloud - environments in the workspace. -1. **Authorization Domains** - for controlling who can access a workspace’s data. When a workspace is created, it can be - associated with zero or more authorization domains. Once a workspace is created, its authorization domains can not be - modified. When workspaces are cloned, the clone inherits all of the authorization domains on the original workspace. - At the time of cloning, it is possible to add additional (but not remove) authorization domains. Members wishing to - access the workspace’s data must be members of _all_ of the workspace's authorization domains. - -### Workspace Actions - -Basic actions that can be performed on workspaces are: - -**Create** - Members of Terra Billing Projects can create their own workspace from scratch and associate their Terra -Billing Project with the workspace. - -**Clone** - Terra Billing Project Members can also clone an existing workspace. Cloning a workspace copies its data and -notebooks while possibly changing its Terra Billing Project or adding authorization domains. 
- -**Launch** - Users with “can-compute” permissions on a workspace can configure and launch cloud environments in the -workspace to analyze the workspace’s data. Cloud costs for the launched environments will be passed through to the -Google Billing Account associated with the workspace’s Terra Billing Project. - -**Share** - Users with “can-share” permissions on a workspace can share the workspace and allow others to read and -potentially update, launch, and share it. - - - Note that while sharing allows users to see a workspace and view the - workspace’s dashboard, users must be members of all of the workspace’s - authorization domains, if any, to view the workspace’s data. - - -### Workspace Permissions - -If you are an Owner, Writer, or Reader of a workspace, Terra displays the workspace in your “Workspaces List.” - -You may also have can-share or can-compute permissions depending on your role and the permissions you were granted when -the workspace was shared with you. The possible workspace permissions are listed below by role. - -| Role | Can Read | Can Modify | Can Compute | Can Share | -| :--------- | :------- | :--------- | :--------------- | :--------------- | -| **Owner** | Yes | Yes | Yes | Yes | -| **Writer** | Yes | Yes | Set when shared. | Set when shared. | -| **Reader** | Yes | No | No | Set when shared. | - -**Owner** - If you created a workspace, you are the workspace’s Owner and can read, modify, share, and execute the -workspace. When sharing workspaces with Readers, you can allow them to share with other readers. When sharing workspaces -with Writers, you can enable them to execute or share with other writers and readers. Workspace owners can also change -the workspace’s Terra Billing Project. - -**Writer** - If you have “Writer” access to a workspace, you can read and modify the workspace. 
The person who shared -the workspace with you may also have allowed you to execute the workspace by giving you can-compute privileges or to -share the workspace by giving you can-share privileges. - -**Reader** - If you have “Reader” access to a workspace, you can see the workspace in your workspace list and view the -workspace’s dashboard. The person who shared the workspace with you may also have allowed you to share the workspace -with other readers by giving you can-share privileges. - -**Can-compute** - Writers may be given “can-compute” privileges allowing them to launch cloud environments. - -**Can-share** - Readers or Writers may be given “can-share” privileges, allowing them to share the workspace with others. - -In general, if you can share a workspace, you can give the new user the same permissions you have or less. - - - Note that workspace billing charges flow through to the workspace's Terra - Billing Project and Google Cloud Billing Account regardless of which user - launched the workspace’s cloud environment. - - - - See [Introduction to Terra](/learn/introduction/intro-to-terra) for more - information and links to Terra documentation and videos about workspaces. - - -### Workspaces and Cloud Costs - -AnVIL and all of its components are free to use; however, as Terra runs on the Google Cloud Platform (GCP), certain -workspace activities, such as running an analysis, storing analysis results, or downloading data, incur Google Cloud -Platform (GCP) fees. - - - Typically, GCP cloud costs are incurred for workspace data storage, when an - analysis is run in the workspace, and when data is downloaded from GCP. - - -Performing the following workspace activities will incur costs on GCP that will be passed through to the workspace’s -Terra Billing Project’s Google Cloud Billing Account: - -1. 
Uploading data to the workspace bucket - the upload network transfer or ingress is free; however, there will be a GCP - fee for storing the data in the bucket over time. -1. Launching a Cloud Environment - The charges will depend on the type of machine and number of processors selected as - well as any disk or RAM space used. This is also referred to as “Launching a Workspace.” -1. Storage for persistent disk associated with any running or paused cloud environments. -1. Storage for notebooks as these are saved in the workspace’s Cloud Storage bucket. -1. Downloading data from the workspace’s Cloud Storage bucket unless this bucket is configured to be “requester pays”. - For requester pays buckets, users must select their own Terra Billing Project to pay for the GCP egress fees. - - - See [Understanding Cloud Costs](/learn/introduction/understanding-cloud-costs) - for more information about cloud costs and current GCP pricing. - - -## Setting Up Cloud Billing - - - Note that not all users will need to set up billing. If you are working for a - lab, for example your lab manager may add you as a member to a lab Terra - Billing Project or give you write, can-compute access to your own or a shared - workspace. - - -### Setting up Billing as an Individual - -Setting up GCP billing as an individual is a good way for all users to get started with the platform as Google funds new -accounts with $300 in free cloud cost fees. - -To set up GCP billing as an individual, the general process is as follows: - -1. Create a Google Cloud Account and set up a payment method. Be sure to create the Google Cloud Account using the same - Google ID (email address) you use for your Terra account. -1. Create a Google Billing Account and link it to Terra by adding [terra-billing@terra.bio](mailto:terra-billing@terra.bio) as a _Billing Account User_ to - the account. -1. Set up a GCP Billing Account Budget and appropriate alerts. -1. 
In Terra, create a Terra Billing Account and use it to create or clone workspaces and pay for any compute, storage, - or egress fees. - -If you plan to share your Terra Billing Project or a workspace with others, be sure you (and they) have a basic -understanding of cloud costs and how cloud costs flow through to the workspace‘s (and not the user’s) Terra Billing -Account. - - - For additional information and detailed instructions for setting up billing as - an individual see [How to Set Up Billing in - Terra](https://support.terra.bio/hc/en-us/articles/360026182251-How-to-set-up-billing-in-Terra). - - -### Setting up Billing for a Lab - -Setting up cloud cost billing for a lab is similar, except that you will need to plan out your account setup to aid the -appropriate assignment of expenses to funding sources, and to enable cloud cost reporting, budgets, and alerts to the -appropriate granularity. - -Budgets and alerts are set at the Terra Billing Project level so you may end up having a Terra Billing Project per lab -member and per shared workspace. - -You will also want to deliberate in your planning about who can-share Terra Billing Accounts and Terra workspaces with -can-compute permissions. For example, you may assign a lab manager who creates workspaces for users and allows them to -execute but not share the workspace. - - - For additional information and approaches, see [Setting Up Lab Billing - Accounts](/learn/investigators/setting-up-lab-accounts) and [Best practices - for managing shared team - costs](https://support.terra.bio/hc/en-us/articles/360047235151-Best-practices-for-managing-shared-team-costs). - - -## Getting Help - -See [Getting Help](/help) for more information on how to obtain support for AnVIL’s components and tools. 
diff --git a/docs/learn/accessing-data/discovering-data.mdx b/docs/learn/accessing-data/discovering-data.mdx deleted file mode 100644 index 474c9bcb7..000000000 --- a/docs/learn/accessing-data/discovering-data.mdx +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: "Finding and Accessing Datasets" ---- - -# Finding and Accessing Datasets - -- [Discovering datasets]({portalURL}/data/consortia) - Datasets of interest can be discovered - in [AnVIL’s dataset catalog]({portalURL}/data/consortia), - the [AnVIL Data Explorer](https://explore.anvilproject.org/) and by reviewing - data-focused workspaces available to you once you are logged into Terra. -- [Requesting dataset access](/learn/accessing-data/requesting-data-access) - AnVIL's open access datasets are - accessible to all upon logging into [Terra](https://anvil.terra.bio/#workspaces) or [AnVIL Data Explorer](https://explore.anvilproject.org/). - To request access for datasets with access restrictions, see AnVIL's guides for requesting access - to [Controlled Access](/learn/accessing-data/requesting-data-access#accessing-controlled-access-data) - and [Consortium Access](/learn/accessing-data/requesting-data-access#accessing-consortium-access-data) datasets. If - you need help to access additional datasets, please reach out to us - at [help@lists.anvilproject.org]({anvilHelp}). -- [Once your access is approved](/learn/accessing-data/requesting-data-access#once-your-access-is-granted) - the - workspaces associated with your new datasets will be listed on your Terra workspaces - tab. [Clone](https://support.terra.bio/hc/en-us/articles/360026130851-How-to-clone-a-workspace) the workspace to begin - working with the dataset. diff --git a/docs/learn/control-cloud-costs.mdx b/docs/learn/control-cloud-costs.mdx new file mode 100644 index 000000000..7eab27cee --- /dev/null +++ b/docs/learn/control-cloud-costs.mdx @@ -0,0 +1,18 @@ +--- +description: "Learn how to control cloud costs and optimize your cloud usage." 
+enableContentEnd: false +enableSupportForum: true +overview: + - label: "Overview" + links: + - "/learn/control-cloud-costs/understanding-cloud-costs" + - label: "Grant Proposals" + links: + - "/learn/control-cloud-costs/budget-templates" + - label: "Video Tutorials" + links: + - "/learn/control-cloud-costs/cloud-costs" +title: "Controlling Cloud Costs" +--- + + diff --git a/docs/learn/investigators/budget-templates.mdx b/docs/learn/control-cloud-costs/budget-templates.mdx similarity index 95% rename from docs/learn/investigators/budget-templates.mdx rename to docs/learn/control-cloud-costs/budget-templates.mdx index 1f33bc60a..ac9eddc17 100644 --- a/docs/learn/investigators/budget-templates.mdx +++ b/docs/learn/control-cloud-costs/budget-templates.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/control-cloud-costs" + text: "Controlling Cloud Costs" description: "An overview of best practices for account setup in AnVIL to effectively track and control cloud costs." -title: "Setting up Lab Accounts in AnVIL" +title: "Preparing a Cloud Cost Budget Justification" --- -# Preparing a Cloud Cost Budget Justification - This document walks you through creating a budget justification paragraph for a grant proposal. ## Understanding GCP Fees @@ -16,7 +17,7 @@ Google Cloud Platform charges fees for: 1. **Computing** - Compute costs are driven by CPU and memory usage. See [Standard Machine Types](https://cloud.google.com/compute/all-pricing#n1_standard_machine_types) for more information and current pricing. - For more information and a broader overview of cloud costs, see [Understanding Cloud Costs](/learn/introduction/understanding-cloud-costs). + For more information and a broader overview of cloud costs, see [Understanding Cloud Costs](/learn/control-cloud-costs/understanding-cloud-costs). 
## Estimating your Cloud Costs diff --git a/docs/learn/introduction/understanding-cloud-costs.mdx b/docs/learn/control-cloud-costs/understanding-cloud-costs.mdx similarity index 97% rename from docs/learn/introduction/understanding-cloud-costs.mdx rename to docs/learn/control-cloud-costs/understanding-cloud-costs.mdx index e469612f1..3fea5b727 100644 --- a/docs/learn/introduction/understanding-cloud-costs.mdx +++ b/docs/learn/control-cloud-costs/understanding-cloud-costs.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/control-cloud-costs" + text: "Controlling Cloud Costs" description: "Understanding Cloud Costs in AnVIL." title: "Understanding Cloud Costs" --- -# Understanding Cloud Costs - AnVIL and all of its components are free to use. You can browse showcase workspaces and the Data Library as soon as you register for an account. Compute operations, however, such as running workflows, running Jupyter Notebooks, and accessing and storing data, may incur Google Cloud Platform charges. diff --git a/docs/learn/data-analysts.mdx b/docs/learn/data-analysts.mdx deleted file mode 100644 index 1a33b76a9..000000000 --- a/docs/learn/data-analysts.mdx +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: "Data Analysts - Guides and Tutorials" ---- - -# Data Analysts - Guides and Tutorials - - - This section lists guides, tutorials, and other resources to help data - analysts find and process data and share results in the AnVIL cloud. - - -## Bioconductor / RStudio - -- [Getting Started with Bioconductor](/learn/interactive-analysis/getting-started-with-bioconductor) - Guides helping R / Bioconductor users start RStudio or Jupyter for interactive analysis and workflows for large-scale data processing. -- [Starting RStudio on the AnVIL Platform](/learn/data-analysts/rstudio-gsg-video) - A video introduction to using RStudio on the AnVIL platform. 
- -### 2021 Bioconductor Popup Workshops - -Recordings and related materials for the 2021 Bioconductor popup workshops are listed below: - -- Week 1: [Using R / Bioconductor in AnVIL](/learn/data-analysts/using-r-bioconductor-in-anvil) - An introduction to the AnVIL cloud computing environment. We learn how to create a Google account to use in AnVIL. We explore key concepts related to workspaces and billing projects. We explore creating a Jupyter notebooks-based cloud environment, and an RStudio cloud environment. -- Week 2: [The R / Bioconductor AnVIL Package](/learn/data-analysts/the-r-bioconductor-anvil-package) - An exploration of how workspaces provide a framework for managing data and large-scale analyses using the HCA Optimus Pipeline and 1000G-high-coverage-2019 workspaces and R using the AnVIL package. -- Week 3: [Running a Workflow](/learn/data-analysts/running-a-workflow) - How to configure and run a workflow, based on the Bioconductor-Workflow-DESeq2 workspace. The workflow starts with FASTQ files and transforms them using salmon to the inputs required for Bioconductor DESeq2 analysis of differential expression. -- Week 4: [Single-cell RNASeq with 'Orchestrating Single Cell Analysis' in R / Bioconductor](/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor) - An introduction to a resource, developed primarily by Aaron Lun of Genentech, Inc., that employs Bioconductor resources for many aspects of the analysis of single-cell RNA-seq data. The resource is a "computable book" written in R Markdown, published at [https://bioconductor.org/books/release/OSCA](https://bioconductor.org/books/release/OSCA). -- Week 5: [Using AnVIL for Teaching R](/learn/data-analysts/using-anvil-for-teaching-r-bioconductor) - A case study of using AnVIL to teach R for a Biostatistics course and provides essentials for using AnVIL for other instructional efforts. 
-- Week 6: [Reproducible Research with AnVILPublish](/learn/data-analysts/reproducible-research-with-anvilpublish) - An exploration of elements of reproducible research with the AnVILPublish package. We will illustrate how to make a docker container tailored publishing AnVIL packages and then emphasize the merits of an R package structure for organizing research activities in a manner that emphasizes provenance and reproducibility. -- Week 7: [Participant Stories](/learn/data-analysts/participant-stories) - Participants share their own use of AnVIL. - -## Galaxy - -- [Getting Started with Galaxy](/learn/interactive-analysis/getting-started-with-galaxy) - A step-by-step tutorial demonstrating how to compute quality metrics of unaligned reads, align the reads to a reference genome using bowtie2, plot a coverage histogram, call variants using FreeBayes, and then summarize the variant calls using bcftools. -- [Starting Galaxy on the AnVIL Platform](/learn/data-analysts/galaxy-gsg-video) - A video introduction to using Galaxy on the AnVIL platform. - -## Jupyter - -- [Starting Jupyter on the AnVIL Platform](/learn/data-analysts/jupyter-gsg-video) - A video introduction to using Jupyter Notebooks on the AnVIL platform. diff --git a/docs/learn/data-submitters/resources/anvil-data-withdrawal-procedures.mdx b/docs/learn/data-submitters/resources/anvil-data-withdrawal-procedures.mdx index 55719416f..e4d25453e 100644 --- a/docs/learn/data-submitters/resources/anvil-data-withdrawal-procedures.mdx +++ b/docs/learn/data-submitters/resources/anvil-data-withdrawal-procedures.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "An overview procedures related to withdrawing data from AnVIL." -title: "AnVIL Data Withdrawal Procedures" +title: "Data Withdrawal Procedures" --- -# Withdrawing Data from AnVIL -
    AnVIL facilitates the removal of individual-level data from studies stored and managed by the resource, honoring the right of research participants to change their preferences with regard to future data sharing. diff --git a/docs/learn/data-submitters/resources/consortium-data-access-guidelines.mdx b/docs/learn/data-submitters/resources/consortium-data-access-guidelines.mdx index 6dd0f1130..a55b37206 100644 --- a/docs/learn/data-submitters/resources/consortium-data-access-guidelines.mdx +++ b/docs/learn/data-submitters/resources/consortium-data-access-guidelines.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "An overview of data sharing and access guidelines for NIH-funded researchers generating ‘large-scale’ genomic data." -title: "Consortium Guidelines for AnVIL Data Access" +title: "Consortium Data Access Guidelines" --- -# Consortium Guidelines for AnVIL Data Access - ## Overview These guidelines represent AnVIL’s expectations for consortia access to data on the AnVIL. For the purpose of these guidelines "consortium data" refers to the data used by the consortium for the primary research of the consortium. Consortium data access is defined as data sharing between consortium members for the primary research of the consortium. Primary research includes data quality assurance/quality control, analyses, and preparations for submission of data for release to the scientific community. If members of the consortium are using data generated by themselves or other members of the consortium, access to consortium data by consortium members does not require Data Access Committee approval. Some consortia may utilize publicly available data for collaborative analyses, or use a mix of datasets generated by the consortium and publicly accessible datasets. Typical data access procedures, such as Data Access Committee approval, is required for access to data not generated by the consortium. 
diff --git a/docs/learn/data-submitters/submission-guide/data-approval-process.mdx b/docs/learn/data-submitters/submission-guide/data-approval-process.mdx index 7832b80aa..71d25f024 100644 --- a/docs/learn/data-submitters/submission-guide/data-approval-process.mdx +++ b/docs/learn/data-submitters/submission-guide/data-approval-process.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "An overview of the approval process for submitting data to AnVIL." -title: "Obtaining Data Approval" +title: "Step 1 - Register Study / Obtain Approvals" --- -# Step 1 - Register Study/Obtain Approvals -
    AnVIL strives to balance the goals of ensuring that data is as widely and freely available as possible while safeguarding the rights and privacy of subjects who participate in NIH-sponsored research. @@ -20,7 +21,7 @@ Institutions are responsible for assuring, through an [Institutional Certificati To deposit large-scale, individual-level data into AnVIL, data submitters must follow the steps below. 1.1. Obtain approval\ -1.2. Register study +1.2. Register study 1.3. Apply to AnVIL ## 1.1. Obtain Approval @@ -80,15 +81,15 @@ See [this link](https://sharing.nih.gov/genomic-data-sharing-policy/submitting-g If you are working with non-human data, you will need to register with an NCBI repository such as GEO or SRA. -## 1.3. Apply to AnVIL +## 1.3. Apply to AnVIL -Prospective AnVIL data submitters should complete the [AnVIL Dataset Onboarding Application](https://docs.google.com/forms/d/e/1FAIpQLSdK_r1DTb_bBUiG9IvY5hwJ4Y-LHoaXk-E4L98MRcLR2TTtcQ/viewform) for review by the AnVIL leadership committee. +Prospective AnVIL data submitters should complete the [AnVIL Dataset Onboarding Application](https://docs.google.com/forms/d/e/1FAIpQLSdK_r1DTb_bBUiG9IvY5hwJ4Y-LHoaXk-E4L98MRcLR2TTtcQ/viewform) for review by the AnVIL leadership committee. -### Before you apply -Note that you will need the **phsID** from **dbGap** (step 1.2 above) and **Access Rrstrictions** (i.e., Data Use limitations/consent groups) to complete the application. +### Before you apply +Note that you will need the **phsID** from **dbGaP** (step 1.2 above) and **Access Restrictions** (i.e., Data Use limitations/consent groups) to complete the application. ### Access Restrictions -Known Data Use Limitations (DUL) - the list of requirements for gaining access and using the data - need to be clearly defined by the data depositor. 
This is completed via the NIH Institutional Certification form signed by the submitter's institutional official and provided directly to the Genomic Program Administrator for the IC (i.e,. NHGRI). +Known Data Use Limitations (DUL) - the list of requirements for gaining access and using the data - need to be clearly defined by the data depositor. This is completed via the NIH Institutional Certification form signed by the submitter's institutional official and provided directly to the Genomic Program Administrator for the IC (i.e., NHGRI). You should include your DULs as part of the AnVIL onboarding application. Consent information, along with other information listed in this spreadsheet, are documented in DUOS. Once ingested into TDR, DUOS will handle all protocols for gaining access. diff --git a/docs/learn/data-submitters/submission-guide/data-submitters-overview.mdx b/docs/learn/data-submitters/submission-guide/data-submitters-overview.mdx index e656d75a8..e39ba12ff 100644 --- a/docs/learn/data-submitters/submission-guide/data-submitters-overview.mdx +++ b/docs/learn/data-submitters/submission-guide/data-submitters-overview.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "A walkthrough of the AnVIL data submission process and requirements." -title: "AnVIL Data Submission Guide" +title: "Submission Process Overview" --- -# AnVIL Data Submission Guide -
    Welcome to the Data Submitters docs on AnVIL. We’re excited to have you here and helping to push the frontiers of biomedicine. @@ -55,7 +56,7 @@ All individual-level human genomic and phenotypic data must conform to the [NIH ### Access Control -For controlled-access datasets, access control within the AnVIL is governed by three major groups - developer access, consortium access, and external researcher access (via dbGaP). For more information, see [Data Access Controls](/learn/accessing-data/data-access-controls). +For controlled-access datasets, access control within the AnVIL is governed by three major groups - developer access, consortium access, and external researcher access (via dbGaP). For more information, see [Data Access Controls](/learn/find-data/data-access-controls). ## Getting Help diff --git a/docs/learn/data-submitters/submission-guide/ingesting-data.mdx b/docs/learn/data-submitters/submission-guide/ingesting-data.mdx index edcf47b16..bf4182913 100644 --- a/docs/learn/data-submitters/submission-guide/ingesting-data.mdx +++ b/docs/learn/data-submitters/submission-guide/ingesting-data.mdx @@ -1,45 +1,46 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "An overview of the AnVIL data staging process." title: "Step 4: Stage Your Data in AnVIL" --- -# Step 4: Stage Your Data in AnVIL - Once you have prepared your omics object files and generated TSV files for each table in your data model, follow the directions below to **stage and deposit the data object files and all TSV files into an AnVIL-owned data deposit workspace**. You’ll work with a designated POC at the AnVIL team to shepherd the data (omic data and image files and TSV load files) into the deposit workspace (and upltimately the AnVIL data storage repository). Note that because each engagement will most likely be different, we will be further developing and refining (as needed) processes as we engage with submitters. 
-For actions taken prior to final process refinement, all transfers should involve the Data Processing WG and AnVIL team to ensure data integrity during the transfer process. +For actions taken prior to final process refinement, all transfers should involve the Data Processing WG and AnVIL team to ensure data integrity during the transfer process. ### Process overview -1. **Log into AnVIL** -You can use either a Google or Microsoft ID for SSO to access your assigned data deposit workspace on [anvil.terra.bio](https://anvil.terra.bio). -2. **Set up your workspace cloud storage** -To facilitate ingestion into TDR, the workspace cloud storage must have a particular directory structure. -3. **Upload data** -Last, you'll upload unstructured data files (omics files, images, etc.) to the data_files folder or sub-folder. +1. **Log into AnVIL** +You can use either a Google or Microsoft ID for SSO to access your assigned data deposit workspace on [anvil.terra.bio](https://anvil.terra.bio). +2. **Set up your workspace cloud storage** +To facilitate ingestion into TDR, the workspace cloud storage must have a particular directory structure. +3. **Upload data** +Last, you'll upload unstructured data files (omics files, images, etc.) to the data_files folder or sub-folder. 4. **Validation** (done by AnVIL ingestion team) ## Step-by-Step Instructions For details, see [How to stage data in your AnVIL deposit workspace](https://support.terra.bio/hc/en-us/articles/28970864241435-How-to-stage-data-in-an-AnVIL-deposit-workspace). -## Next steps (done by ingestion team) -After you stage the data in the deposit workspace, the AnVIL team will perform these pre-ingestion operations. +## Next steps (done by ingestion team) +After you stage the data in the deposit workspace, the AnVIL team will perform these pre-ingestion operations. ### Validation (automated) These tests will be executed once data has been ported to AnVIL. 
-- **QC check of submission form to genomic object files** +- **QC check of submission form to genomic object files** Check that the number of files match the number in the submission Google form -- **QC check of phenotype and metadata** +- **QC check of phenotype and metadata** Make sure the phenotype file data fields match the defined data model and sample IDs are consistent with phenotype and linked to a subject and consent. -- **Ingestion Validation (automated)** -To confirm the ingested data transferred as expected and maintain the file integrity, Google automatically checks the md5 sum of the end file against the original after each file transfer. - +- **Ingestion Validation (automated)** +To confirm the ingested data transferred as expected and maintain the file integrity, Google automatically checks the md5 sum of the end file against the original after each file transfer. + ### Data Indexing (genomic object files) diff --git a/docs/learn/data-submitters/submission-guide/prepare-for-submission.mdx b/docs/learn/data-submitters/submission-guide/prepare-for-submission.mdx index 880fc6df5..d3adaf1b8 100644 --- a/docs/learn/data-submitters/submission-guide/prepare-for-submission.mdx +++ b/docs/learn/data-submitters/submission-guide/prepare-for-submission.mdx @@ -1,26 +1,27 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "How to organize and format data tables for ingest to AnVIL" -title: "Step 3 - Prepare for submission" +title: "Step 3 - Prepare for Submission" --- -# Step 3 - Prepare for Submission -
    -In this step, you will prepare for data ingestion by organizing all required data and metadata in your data model (step 2) in a format compatible with AnVIL. +In this step, you will prepare for data ingestion by organizing all required data and metadata in your data model (step 2) in a format compatible with AnVIL.
    -### AnVIL accepts two types of data -- **Object files (large, unstructured data files)** -Object files include genomic and other omics data as well as image files. Object files require minimal metadata, some of which is generated by the AnVIL (such as full paths to the files in AnVIL cloud storage). -**Phenotypes and metadata** +### AnVIL accepts two types of data +- **Object files (large, unstructured data files)** +Object files include genomic and other omics data as well as image files. Object files require minimal metadata, some of which is generated by the AnVIL (such as full paths to the files in AnVIL cloud storage). +- **Phenotypes and metadata** Clinical and phenotypic data as well as object file metadata will be submitted in TSV/TXT format (see requirements below). **Most studies are submitting both.** ### Formatting requirements for submitted data -You will submit all metadata (including phenotypic data) in a spreadsheet-like file (TSV/TXT format). To prepare data for submission, you will +You will submit all metadata (including phenotypic data) in a spreadsheet-like file (TSV/TXT format). To prepare data for submission, you will - Make sure all object files conform to AnVIL’s naming requirements - Generate a TSV file for each table in the data model (clinical data, phenotypes, and metadata from Step 2) diff --git a/docs/learn/data-submitters/submission-guide/qc-data.mdx b/docs/learn/data-submitters/submission-guide/qc-data.mdx index 02aff4ea3..f1549f116 100644 --- a/docs/learn/data-submitters/submission-guide/qc-data.mdx +++ b/docs/learn/data-submitters/submission-guide/qc-data.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "An overview of the AnVIL data QC process." title: "Step 5 - QC Data" --- -# Step 5 - QC Data - After submission, Data Submitters should evaluate genomic data (ex. BAMs or CRAMS) for basic sequence yield and quality control (QC) metrics.
Scroll down for an example of Whole-genome Sequence (WGS) and Whole-exome Sequence (WES) QC metrics that ensures depth and breadth of coverage requirements (provided by AnVIL as a resource for data submitters). diff --git a/docs/learn/data-submitters/submission-guide/set-up-a-data-model.mdx b/docs/learn/data-submitters/submission-guide/set-up-a-data-model.mdx index b45ca7166..28c1900e9 100644 --- a/docs/learn/data-submitters/submission-guide/set-up-a-data-model.mdx +++ b/docs/learn/data-submitters/submission-guide/set-up-a-data-model.mdx @@ -1,94 +1,93 @@ --- +breadcrumbs: + - path: "/learn/submit-data" + text: "Submitting Data" description: "This doc describes what the AnVIL data model encompasses." -title: "Set Up a Data Model" +title: "Step 2 - Set Up a Data Model" --- -# Step 2 - Set Up a Data Model -
    After your dataset has been approved by the AnVIL Data Ingestion Committee, you will need to set up and submit your data model, specifying what data you have and how data are connected. The AnVIL Data Model is intended to: - - Standardize the data submitted to the AnVIL in order to accept a broad range of high-quality data across consortia + - Standardize the data submitted to the AnVIL in order to accept a broad range of high-quality data across consortia - Maximize data findability and usefulness, and facilitate cross-study analysis The first goal requires a very flexible data model, and the second requires some constraints on the data model. The guidelines below are intended to help meet those two goals.
    -As you get started, we recommend you review the AnVIL Data Model Data Dictionary [here](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) to learn more about the general structure (which we will describe in more detail below). You’ll coordinate with the AnVIL data ingest team to facilitate submission activities. +As you get started, we recommend you review the AnVIL Data Model Data Dictionary [here](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) to learn more about the general structure (which we will describe in more detail below). You’ll coordinate with the AnVIL data ingest team to facilitate submission activities. If your dataset has been accepted by AnVIL and does not easily fit into an existing template, please reach out to the AnVIL Team at [help@lists.anvilproject.org](mailto:help@lists.anvilproject.org). -You’ll end this step with a better understanding of what your data dictionary will include for your data model. Step 3 will instruct you on preparing your data according to your data model for submission to AnVIL. +You’ll end this step with a better understanding of what your data dictionary will include for your data model. Step 3 will instruct you on preparing your data according to your data model for submission to AnVIL. ## 2.1 \- First Steps ### Coordinate with the AnVIL Data Ingest Team -To **collaborate with AnVIL** on uploading data, you’ll **notify AnVIL of completed dbGaP registration** in one of two ways. +To **collaborate with AnVIL** on uploading data, you’ll **notify AnVIL of completed dbGaP registration** in one of two ways. 
-* Through an already open AnVIL Zendesk ticket +* Through an already open AnVIL Zendesk ticket * By contacting AnVIL support at [anvil-data@broadinstitute.org](mailto:anvil-data@brodinstitute.org) -It is useful to coordinate with AnVIL before you start to set up your data model, in case you run into questions or problems. +It is useful to coordinate with AnVIL before you start to set up your data model, in case you run into questions or problems. ## 2.2 \- Create Your AnVIL Data Model A data model explicitly determines the structure of data. It organizes data elements and standardizes how the data elements relate to one another. -We encourage you to read the [README](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) and [AnVIL Table Overview](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=139218770#gid=139218770) tabs in the AnVIL Data Model data dictionary as you begin creating your AnVIL Data Model. - -These tabs include helpful information such as +We encourage you to read the [README](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) and [AnVIL Table Overview](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=139218770#gid=139218770) tabs in the AnVIL Data Model data dictionary as you begin creating your AnVIL Data Model. 
-* The overall purpose of the data dictionary -* Information on the AnVIL Data Model (findability subset) -* Details on how to expand the data model to fit your needs -* Expectations on how to submit your schema +These tabs include helpful information such as - +* The overall purpose of the data dictionary +* Information on the AnVIL Data Model (findability subset) +* Details on how to expand the data model to fit your needs +* Expectations on how to submit your schema ### Data Model Requirements -The linked [Data Dictionary](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) provides context for mapping data to the AnVIL core findability model. It includes information on the tables to be included, which concepts should be included in each table, and suggested coding systems for each. The Data Dictionary is a learning resource to assist users with creating their own AnVIL Data Model for submission. +The linked [Data Dictionary](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) provides context for mapping data to the AnVIL core findability model. It includes information on the tables to be included, which concepts should be included in each table, and suggested coding systems for each. The Data Dictionary is a learning resource to assist users with creating their own AnVIL Data Model for submission. Please read the descriptions for each table outlined in the [Data Dictionary](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?usp=sharing&ouid=112123611582571421629&rtpof=true&sd=true) carefully, and reach out to your AnVIL team contact with questions not addressed in the data dictionary document. 
These data model requirements help ensure AnVIL datasets are not only useful to the researchers who created them but also enable others to analyze data collectively across studies in the AnVIL Terra platform. -Start by thinking of what data you have and how you have already organized it and how it may fit or need to be reorganized to fit the requirements. +Start by thinking of what data you have and how you have already organized it and how it may fit or need to be reorganized to fit the requirements. -### Core Tables for All Studies +### Core Tables for All Studies -At a high level, there are three core tables (“entities”) in the AnVIL Data Model: **BioSample**, **Donor**, and **File**. +At a high level, there are three core tables (“entities”) in the AnVIL Data Model: **BioSample**, **Donor**, and **File**. -The **BioSample** table is the only required table, although **Donor** and **File** tables are strongly encouraged to improve the usability and findability of the data. +The **BioSample** table is the only required table, although **Donor** and **File** tables are strongly encouraged to improve the usability and findability of the data. -The specifications for each table, including required fields, strongly recommended fields, field names, and field descriptions, are included in the Data Dictionary. Brief descriptions of these core tables, with notable requirements, are below. +The specifications for each table, including required fields, strongly recommended fields, field names, and field descriptions, are included in the Data Dictionary. Brief descriptions of these core tables, with notable requirements, are below. -* [**BioSample**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=323182902#gid=323182902) **(required):** Contains information about the sample(s) included in the study. Example data types include Anatomical Site from which the biosample was taken and the cell type. 
- * The `biosample_id` (first column) is required. - * If a File table will not be submitted, the BioSample table must contain a column indicating which samples correspond to which files. -* [**Donor**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=1923143452#gid=1923143452) **(strongly recommended):** Contains demographic and phenotypic information about the donor. Example data types include phenotypic sex, reported ethnicity, and genetic ancestry. - * The `donor_id` (first column) is required. -* [**File**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=462062238#gid=462062238) **(strongly recommended):** Contains information for files associated with the study. Example data includes DRS ID, filename, and file type. - * The `file_id` (first column) is required. - * It is strongly recommended that the table includes a `BioSample.biosample_id` column to link the biosample id between tables. +* [**BioSample**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=323182902#gid=323182902) **(required):** Contains information about the sample(s) included in the study. Example data types include Anatomical Site from which the biosample was taken and the cell type. + * The `biosample_id` (first column) is required. + * If a File table will not be submitted, the BioSample table must contain a column indicating which samples correspond to which files. +* [**Donor**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=1923143452#gid=1923143452) **(strongly recommended):** Contains demographic and phenotypic information about the donor. Example data types include phenotypic sex, reported ethnicity, and genetic ancestry. + * The `donor_id` (first column) is required. 
+* [**File**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=462062238#gid=462062238) **(strongly recommended):** Contains information for files associated with the study. Example data includes DRS ID, filename, and file type. + * The `file_id` (first column) is required. + * It is strongly recommended that the table includes a `BioSample.biosample_id` column to link the biosample id between tables. * AnVIl will add DRS URIs for object files as part of the ingest process. -### Optional Tables +### Optional Tables The AnVIL Data Model Data Dictionary includes other optional tables you can use to contain data about conditions, activity, and your project. Brief descriptions of these optional tables, with notable requirements, are below. -* [**Condition Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=524240363#gid=524240363) Contains information about condition(s) and phenotypes associated with a donor. - * The `condition_id` (first column) is required. - * It is strongly recommended that the table includes a `donor_id` column to link the donor id between tables. -* [**Activity Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=1184610893#gid=1184610893) Contains details on different types of activities used to generate or process data. Example data includes sequencing method, reference assembly, and assay type. - * The `activity_id` (first column) is required. - * Should include a `file_id` column that references the associated file. -* [**Project Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=672801364#gid=672801364) Contains information about the project the study is a part of. Example data includes funding, and principal investigator. It is strongly recommended that the table includes a title field. - * The `project_id` (first column) is required. 
+* [**Condition Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=524240363#gid=524240363) Contains information about condition(s) and phenotypes associated with a donor. + * The `condition_id` (first column) is required. + * It is strongly recommended that the table includes a `donor_id` column to link the donor id between tables. +* [**Activity Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=1184610893#gid=1184610893) Contains details on different types of activities used to generate or process data. Example data includes sequencing method, reference assembly, and assay type. + * The `activity_id` (first column) is required. + * Should include a `file_id` column that references the associated file. +* [**Project Table:**](https://docs.google.com/spreadsheets/d/1D0L5wm5pnpLKYqakYm9tyy-VmP5oJmGR/edit?gid=672801364#gid=672801364) Contains information about the project the study is a part of. Example data includes funding, and principal investigator. It is strongly recommended that the table includes a title field. + * The `project_id` (first column) is required. #### Non-standard Data Models @@ -96,7 +95,7 @@ If your dataset has been accepted by AnVIL and has needs not described here, ple ## 2.3 \- Generate Your Data Dictionary -All AnVIL studies must submit a **Data Dictionary table** (spreadsheet file) that defines your complete data model. It includes (in separate tabs for each table) field names, field descriptions, field types, examples, enumeration values (where applicable), and multi-value delimiter symbols used (where applicable) for each table in the data model. +All AnVIL studies must submit a **Data Dictionary table** (spreadsheet file) that defines your complete data model. 
It includes (in separate tabs for each table) field names, field descriptions, field types, examples, enumeration values (where applicable), and multi-value delimiter symbols used (where applicable) for each table in the data model. **For a template Data Dictionary with all required and suggested tables**, click [here](https://github.com/anvilproject/Data-Model/blob/main/AnVILDataSubmissionFindabilitySubsetSchema.template.xlsx). To download the AnVILDataSubmissionFindabilitySubsetSchema.template.xlsx file, click on the three-dot icon at the top right and then click Download.  @@ -125,5 +124,5 @@ All AnVIL studies must submit a **Data Dictionary table** (spreadsheet file) tha ### Contact information -AnVIL Data Ingest Team [anvil-data@broadinstitute.org](mailto:anvil-data@broadinstitute.org) +AnVIL Data Ingest Team [anvil-data@broadinstitute.org](mailto:anvil-data@broadinstitute.org) AnVIL Help Team help@lists.anvilproject.org diff --git a/docs/learn/find-data.mdx b/docs/learn/find-data.mdx new file mode 100644 index 000000000..077be7a5e --- /dev/null +++ b/docs/learn/find-data.mdx @@ -0,0 +1,21 @@ +--- +description: "Discover and access AnVIL datasets." 
+enableContentEnd: false +enableSupportForum: true +overview: + - label: "Explore Datasets" + links: + - label: "AnVIL Data Explorer" + url: https://explore.anvilproject.org/datasets + - label: "AnVIL Dataset Catalog" + url: https://anvilproject.org/data + - label: "Access Data" + links: + - /learn/find-data/requesting-data-access + - /learn/find-data/data-access-controls + - /learn/find-data/bringing-your-own-data + - /learn/find-data/cross-platform-data-access-with-drs-uris-in-terra +title: "Finding Data" +--- + + diff --git a/docs/learn/accessing-data/bringing-your-own-data.mdx b/docs/learn/find-data/bringing-your-own-data.mdx similarity index 86% rename from docs/learn/accessing-data/bringing-your-own-data.mdx rename to docs/learn/find-data/bringing-your-own-data.mdx index 6fda9a398..769b50534 100644 --- a/docs/learn/accessing-data/bringing-your-own-data.mdx +++ b/docs/learn/find-data/bringing-your-own-data.mdx @@ -1,8 +1,9 @@ --- +breadcrumbs: + - path: "/learn/find-data" + text: "Finding Data" description: "Uploading your own data to AnVIL's Terra instance." title: "Bringing Your Own Data" --- -# Bringing Your Own Data - Users can also upload their own datasets to AnVIL's Terra instance and utilize Terra's sharing mechanisms to manage and control access. Terra's access controls can also be leveraged to protect pre-release datasets during upload and [QC](/learn/data-submitters/submission-guide/ingesting-data#42-validation-steps-automated). 
diff --git a/docs/learn/reference/cross-platform-data-access-with-drs-uris-in-terra.mdx b/docs/learn/find-data/cross-platform-data-access-with-drs-uris-in-terra.mdx similarity index 93% rename from docs/learn/reference/cross-platform-data-access-with-drs-uris-in-terra.mdx rename to docs/learn/find-data/cross-platform-data-access-with-drs-uris-in-terra.mdx index 9341ce371..8d89fb856 100644 --- a/docs/learn/reference/cross-platform-data-access-with-drs-uris-in-terra.mdx +++ b/docs/learn/find-data/cross-platform-data-access-with-drs-uris-in-terra.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/find-data" + text: "Finding Data" description: "An overview of what DRS Uniform Resource Identifiers (URIs) are, and why they are used in Terra. It also outlines where they are used and how to access the data they represent in the Terra platform." title: "Accessing Data with DRS URIs in Terra" --- -# Accessing Data with DRS URIs in Terra - Varying formats for identifying data stored on different cloud-based infrastructures make it challenging to combine data across cloud infrastructures effectively. The GA4GH Data Repository Service (DRS) defines a generic interface for data repositories to allow access to data in a single, standard way. DRS gives a dataset on any infrastructure a unique ID mapping that allows for flexible retrieval. diff --git a/docs/learn/accessing-data/data-access-controls.mdx b/docs/learn/find-data/data-access-controls.mdx similarity index 81% rename from docs/learn/accessing-data/data-access-controls.mdx rename to docs/learn/find-data/data-access-controls.mdx index f8e86f3cc..36ebc5466 100644 --- a/docs/learn/accessing-data/data-access-controls.mdx +++ b/docs/learn/find-data/data-access-controls.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/find-data" + text: "Finding Data" description: "AnVIL access controls can selectively grant access to groups with different access requirements." 
title: "Data Access Controls" --- -# Data Access Controls - AnVIL access controls can selectively grant access to groups with different access requirements. @@ -24,7 +25,7 @@ Members of the data-generating consortium are granted access directly in Terra b ### External Researcher Access -Members of the wider community may [request access through dbGaP](/learn/accessing-data/requesting-data-access#accessing-controlled-access-data). Upon receiving approval in dbGaP, the researcher will be able to access the requested data within AnVIL once they have [linked their Terra account and eRA Commons address](/learn/accessing-data/requesting-data-access#linking-your-terra-account-and-your-era-commons-address). +Members of the wider community may [request access through dbGaP](/learn/find-data/requesting-data-access#accessing-controlled-access-data). Upon receiving approval in dbGaP, the researcher will be able to access the requested data within AnVIL once they have [linked their Terra account and eRA Commons address](/learn/find-data/requesting-data-access#linking-your-terra-account-and-your-era-commons-address). To synchronize dbGaP approvals with Terra, dbGaP periodically deposits a copy of their access list to a secure FTP site. This access list is then read by Terra and synchronized to the appropriate workspace auth groups. In this manner, workspace auth group membership for external researchers is maintained solely by dbGaP. 
diff --git a/docs/learn/accessing-data/requesting-data-access.mdx b/docs/learn/find-data/requesting-data-access.mdx similarity index 94% rename from docs/learn/accessing-data/requesting-data-access.mdx rename to docs/learn/find-data/requesting-data-access.mdx index a0c64408e..637143596 100644 --- a/docs/learn/accessing-data/requesting-data-access.mdx +++ b/docs/learn/find-data/requesting-data-access.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/find-data" + text: "Finding Data" description: "AnVIL is a repository for open and controlled access datasets. Dataset access is controlled in adherence to NIH Policy and in line with the standards set forth in the individual consents involved in each cohort." title: "Requesting Data Access" --- -# Requesting Data Access - AnVIL is a repository for open and controlled access datasets. Dataset access is controlled in adherence to NIH Policy and in line with the standards set @@ -17,10 +18,10 @@ AnVIL provides three types of data access: 1. Open Access - Open access datasets are accessible to all upon logging into [Terra](https://anvil.terra.bio/#workspaces) or the [AnVIL Data Explorer](https://explore.anvilproject.org). -1. [Controlled Access](/learn/accessing-data/requesting-data-access#accessing-controlled-access-data) - Controlled Access datasets are accessible to researchers for +1. [Controlled Access](/learn/find-data/requesting-data-access#accessing-controlled-access-data) - Controlled Access datasets are accessible to researchers for use matching the data's [dbGaP consent codes](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4721915/). Access is granted by the dbGaP data access process described below. -1. [Consortium Access](/learn/accessing-data/requesting-data-access#accessing-consortium-access-data) - Consortium Access datasets are accessible to consortia +1. 
[Consortium Access](/learn/find-data/requesting-data-access#accessing-consortium-access-data) - Consortium Access datasets are accessible to consortia members under the consortium data sharing agreement. ## Accessing Controlled Access Data diff --git a/docs/learn/get-started.mdx b/docs/learn/get-started.mdx new file mode 100644 index 000000000..9c2112e19 --- /dev/null +++ b/docs/learn/get-started.mdx @@ -0,0 +1,25 @@ +--- +description: "Set up your AnVIL account and access the AnVIL data ecosystem." +enableContentEnd: false +enableSupportForum: true +overview: + - label: "Step By Step Guide" + links: + - label: "Getting Started on AnVIL" + url: https://jhudatascience.org/AnVIL_Book_Getting_Started/index.html + - label: "Account Setup" + links: + - "/learn/get-started/overview-of-account-setup" + - "/learn/get-started/obtaining-a-google-id" + - "/learn/get-started/creating-a-terra-account" + - label: "Lab Account Setup" + links: + - "/learn/get-started/setting-up-lab-accounts" + - label: "Billing Setup" + links: + - "/learn/get-started/billing-concepts" + - "/learn/get-started/creating-a-google-cloud-billing-account" +title: "Getting Started" +--- + + diff --git a/docs/learn/billing-setup/billing-concepts.mdx b/docs/learn/get-started/billing-concepts.mdx similarity index 98% rename from docs/learn/billing-setup/billing-concepts.mdx rename to docs/learn/get-started/billing-concepts.mdx index 64173adaa..015057ee9 100644 --- a/docs/learn/billing-setup/billing-concepts.mdx +++ b/docs/learn/get-started/billing-concepts.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "An overview of cloud billing for Terra in AnVIL." -title: "Setting up Cloud Billing" +title: "Overview of Billing Concepts" --- -# Overview of Billing Concepts - Before setting up billing, it is important to have a working knowledge of cloud billing in the Google Cloud Platform and Terra. 
Key concepts are listed below, along with additional Terra documentation resources. ![Key Concepts](/consortia/learn/billing-setup/key-concepts.png) diff --git a/docs/learn/billing-setup/creating-a-google-cloud-billing-account.mdx b/docs/learn/get-started/creating-a-google-cloud-billing-account.mdx similarity index 95% rename from docs/learn/billing-setup/creating-a-google-cloud-billing-account.mdx rename to docs/learn/get-started/creating-a-google-cloud-billing-account.mdx index ecd4979fb..1bc2b14e1 100644 --- a/docs/learn/billing-setup/creating-a-google-cloud-billing-account.mdx +++ b/docs/learn/get-started/creating-a-google-cloud-billing-account.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "How to create a Google Cloud Billing Account." title: "Creating a Google Cloud Billing Account" --- -# Creating a Google Cloud Billing Account - Google Cloud Billing Accounts allow you to configure payment and track spending in GCP. @@ -23,7 +24,7 @@ Google Cloud Billing Accounts allow you to configure payment and track spending Before setting up your Google Cloud Billing Account: -1. Decide what email address to use. The email you choose must be associated with a Google ID (see [Obtaining a Google ID](/learn/account-setup/obtaining-a-google-id)) and must be the email you will use to log in to Terra. +1. Decide what email address to use. The email you choose must be associated with a Google ID (see [Obtaining a Google ID](/learn/get-started/obtaining-a-google-id)) and must be the email you will use to log in to Terra. 1. Decide what payment method (credit card, debit card, PayPal, or bank account) you will use. 1. Determine if you will set up as an individual or business. If you are setting up an account for a company, organization, lab, or institution, select "Business." If you are setting up as yourself, choose "Individual." 
diff --git a/docs/learn/account-setup/creating-a-terra-account.mdx b/docs/learn/get-started/creating-a-terra-account.mdx similarity index 74% rename from docs/learn/account-setup/creating-a-terra-account.mdx rename to docs/learn/get-started/creating-a-terra-account.mdx index 8af032393..eaae3a3ce 100644 --- a/docs/learn/account-setup/creating-a-terra-account.mdx +++ b/docs/learn/get-started/creating-a-terra-account.mdx @@ -1,11 +1,12 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "" -title: "Account Setup" +title: "Creating a Terra Account" --- -# Creating a Terra Account - -To register for a Terra account, you will need a Gmail account or another email account (an institutional email, for example) associated with a Google identity for SSO. See [Obtaining a Google Id](/learn/account-setup/obtaining-a-google-id) for instructions on creating a Google ID. +To register for a Terra account, you will need a Gmail account or another email account (an institutional email, for example) associated with a Google identity for SSO. See [Obtaining a Google ID](/learn/get-started/obtaining-a-google-id) for instructions on creating a Google ID. See [Set up a Terra account](https://support.terra.bio/hc/en-us/articles/360028235911-How-to-register-for-a-Terra-account) for instructions on setting up your Terra account.
diff --git a/docs/learn/account-setup/obtaining-a-google-id.mdx b/docs/learn/get-started/obtaining-a-google-id.mdx similarity index 91% rename from docs/learn/account-setup/obtaining-a-google-id.mdx rename to docs/learn/get-started/obtaining-a-google-id.mdx index fe5bde478..7755d5c38 100644 --- a/docs/learn/account-setup/obtaining-a-google-id.mdx +++ b/docs/learn/get-started/obtaining-a-google-id.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "Obtaining a Google ID" -title: "Account Setup" +title: "Obtaining a Google ID" --- -# Obtaining a Google ID - A Google ID is a Google level resource required to create GCP and Terra accounts. A Google ID is simply an email address. This email address must be either: a non Google email address that you have associated with a Google Account or a Gmail, Google Workspace (formerly G Suite), or Google Identity email address. diff --git a/docs/learn/account-setup/overview-of-account-setup.mdx b/docs/learn/get-started/overview-of-account-setup.mdx similarity index 83% rename from docs/learn/account-setup/overview-of-account-setup.mdx rename to docs/learn/get-started/overview-of-account-setup.mdx index 5b7e67ad4..6b7cd0e6d 100644 --- a/docs/learn/account-setup/overview-of-account-setup.mdx +++ b/docs/learn/get-started/overview-of-account-setup.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "Overview of Account Setup" -title: "Account Setup" +title: "Overview of Account Setup" --- -# Overview of Account Setup - To use AnVIL, you will need a Google account to authenticate with Terra and the AnVIL Data Explorer and to associate your Terra account with a Google Cloud Platform (GCP) account. If you are accessing controlled-access data, you will also need an eRA Commons ID or to be a member of a data-sharing consortium. @@ -21,4 +22,4 @@ The guides below walk you through each step in the account setup process. 1.
[Link your Terra and eRA Commons ID](https://support.terra.bio/hc/en-us/articles/360038086332-Linking-Terra-to-External-Servers) - To use controlled-access data on Terra, you will need to link your Terra user ID to your authorization account (such as a dbGaP account). Linking to external servers will allow Terra to automatically determine if you can access controlled datasets hosted in Terra (ex., TCGA, TOPMed, etc.) based on your approved dbGaP applications. -Next, see [Requesting Data Access](/learn/accessing-data/requesting-data-access) for more information about accessing controlled access data and configuring Terra to read your data access privileges from dbGaP or your consortia access control list. +Next, see [Requesting Data Access](/learn/find-data/requesting-data-access) for more information about accessing controlled access data and configuring Terra to read your data access privileges from dbGaP or your consortia access control list. diff --git a/docs/learn/investigators/setting-up-lab-accounts.mdx b/docs/learn/get-started/setting-up-lab-accounts.mdx similarity index 97% rename from docs/learn/investigators/setting-up-lab-accounts.mdx rename to docs/learn/get-started/setting-up-lab-accounts.mdx index 22280f416..8ae19bc24 100644 --- a/docs/learn/investigators/setting-up-lab-accounts.mdx +++ b/docs/learn/get-started/setting-up-lab-accounts.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/get-started" + text: "Getting Started" description: "An overview of best practices for account setup in AnVIL to effectively track and control cloud costs." -title: "Setting up Lab Accounts and Billing in AnVIL" +title: "Setting up Lab Accounts and Billing" --- -# Setting up Lab Accounts and Billing in AnVIL - A guide intended to help PIs and lab managers set up and configure the accounts and billing resources required for data analysts to run analyses in Terra. This guide presents a recommended approach for labs new to cloud computing to set up billing on AnVIL. 
It enables detailed cloud cost accounting, provides users feedback on the costs of their analyses, and reduces the opportunity for unexpected cloud compute costs. @@ -33,9 +34,9 @@ Knowledge of these concepts and how they interrelate will help you implement the Critical concepts for review are: -1. **Terra Workspaces and Permissions** - For an overview of Terra workspaces, workspace permissions, and general billing information, see [ Getting Started with AnVIL](/learn). -1. **Cloud Cost Basics** - For an overview of cloud costs, see [Understanding Cloud Costs](/learn/introduction/understanding-cloud-costs). -1. **Billing Concepts** - For an overview of Google Cloud Platform and Terra billing concepts, see [Overview of Billing Concepts](/learn/billing-setup/billing-concepts). +1. **Terra Workspaces and Permissions** - For an overview of Terra workspaces, workspace permissions, and general billing information, see [Getting Started with AnVIL](/learn/get-started). +1. **Cloud Cost Basics** - For an overview of cloud costs, see [Understanding Cloud Costs](/learn/control-cloud-costs/understanding-cloud-costs). +1. **Billing Concepts** - For an overview of Google Cloud Platform and Terra billing concepts, see [Overview of Billing Concepts](/learn/get-started/billing-concepts). ## Lab Setup Design @@ -103,7 +104,7 @@ Before you start, you will want to plan out your setup and: 1. Determine the set of Google Billing Accounts to create. This guide recommends one Google Billing Account per funding source (grant) to cleanly separate costs. 1. Determine the list of Terra Billing Projects to create - This guide recommends one per Data Analyst. If finer-grained reporting is desired, create one Terra Billing Project for each of a data analyst’s workspaces. Use a consistent naming convention that will help you identify the user and project the Terra Billing Project is for. 1. Determine the set of workspaces to create. This initially may be one per data analyst. -1.
If you will be cloning a data workspace with controlled access data for data analysts, make sure each data analyst is a member of the workspace’s Authorization Domain. For more information, see [Accessing Data](/learn/accessing-data/requesting-data-access). +1. If you will be cloning a data workspace with controlled access data for data analysts, make sure each data analyst is a member of the workspace’s Authorization Domain. For more information, see [Requesting Data Access](/learn/find-data/requesting-data-access). 1. Determine the expected costs, budget, and budget alerts you would like for each Terra Billing Project. See [Controlling Cloud Costs - Sample Use Cases](https://support.terra.bio/hc/en-us/articles/360029772212-Controlling-Cloud-costs-sample-use-cases) for a framework for estimating cloud costs. This guide recommends setting alerts at 50% and 90% of the expected budget. ### 1 - Create the Team’s Google Accounts @@ -134,13 +135,13 @@ Once lab members have a Google ID, they can use that email address to create a T To create a Terra account: -1. Follow the instructions provided in the [Account Setup Guide](/learn/account-setup/overview-of-account-setup). +1. Follow the instructions provided in the [Account Setup Guide](/learn/get-started/overview-of-account-setup). ### 3 - Create Your Lab’s Google Billing Accounts If this is your first Google Billing Account, see [Creating a Google Billing - Account](/learn/billing-setup/creating-a-google-cloud-billing-account) for a + Account](/learn/get-started/creating-a-google-cloud-billing-account) for a walk-through of the first-time flow. Use the instructions below to add additional accounts. 
diff --git a/docs/learn/introduction/guides-and-tutorials.mdx b/docs/learn/introduction/guides-and-tutorials.mdx deleted file mode 100644 index 1583dc5bf..000000000 --- a/docs/learn/introduction/guides-and-tutorials.mdx +++ /dev/null @@ -1,26 +0,0 @@ ---- -description: "Overview of persona-specific guides and tutorials for the AnVIL platform." -title: "Guides and Tutorials" ---- - -# Guides and Tutorials - -Key guides and tutorials grouped by user persona are listed below. For a general onboarding and getting-started tour, see the [Learn](/learn) section. See [Getting Help with AnVIL Tools and Components](/help) to obtain personalized help for each of AnVIL's components and tools. - -### General - -[Getting Started](/learn#getting-started) - An overview of AnVIL with a focus on onboarding and preparing new users to run genomic analyses in the cloud. - -### Investigators - -- [Setting Up Lab Accounts](/learn/investigators/setting-up-lab-accounts) - Follow these instructions to get your accounts, set up billing, and set up your team members to do research with AnVIL. -- [Creating Cloud Cost Budget Justifications](/learn/investigators/budget-templates) - Templates for calculating a cloud cost budget and writing a budget justification for using AnVIL in your grant applications. - -### Data Analysts - -- [Getting Started with Bioconductor](/learn/interactive-analysis/getting-started-with-bioconductor) - Guides helping R / Bioconductor users start RStudio or Jupyter for interactive analysis, and workflows for large-scale data processing. -- [Getting Started with Galaxy](/learn/interactive-analysis/getting-started-with-galaxy) - A step-by-step tutorial demonstrating how to compute quality metrics of unaligned reads, align the reads to a reference genome using bowtie2, plot a coverage histogram, call variants using FreeBayes, and then summarize the variant calls using bcftools. 
- -### Data Submitters - -[Data Submission Guide](/learn/data-submitters/submission-guide/data-submitters-overview) - An overview of the process of submitting data to AnVIL and confirming eligibility for submission. diff --git a/docs/learn/investigators.mdx b/docs/learn/investigators.mdx deleted file mode 100644 index 5d82e71f9..000000000 --- a/docs/learn/investigators.mdx +++ /dev/null @@ -1,18 +0,0 @@ ---- -description: "Overview of investigators guides and tutorials for the AnVIL platform." -title: "Investigators Tutorial Overview" ---- - -# Investigators - Guides and Tutorials - - - This section lists guides, tutorials, and other resources to help principal investigators and lab managers enable their team to use AnVIL. - - -### Lab Setup - -[Setting Up Lab Accounts](/learn/investigators/setting-up-lab-accounts) - Follow these instructions to get your accounts, set up billing, and set up your team members to do research with AnVIL. - -### Grant Proposals - -[Creating Cloud Cost Budget Justifications](/learn/investigators/budget-templates) - Templates for calculating a cloud cost budget and writing a budget justification for using AnVIL in your grant applications. diff --git a/docs/learn/reference/gtex-v8-free-egress-instructions.mdx b/docs/learn/reference/gtex-v8-free-egress-instructions.mdx index 557e0b225..ec8bae2dc 100644 --- a/docs/learn/reference/gtex-v8-free-egress-instructions.mdx +++ b/docs/learn/reference/gtex-v8-free-egress-instructions.mdx @@ -1,14 +1,14 @@ --- +breadcrumbs: + - path: "/learn/find-data" + text: "Finding Data" description: "Instructions for free egress download of GTEx v8 from the AnVIL Gen3 Data Commons" title: "GTEx v8 - Egress Instructions" --- - -# GTEx v8 - Egress Instructions - Starting October 1, 2024, the AnVIL Gen3 Commons and free downloadable version of GTEx v8 through Gen3 will be decommissioned. Please see [Sunsetting Gen3 Functionality in AnVIL](/news/2024/09/16/sunsetting-gen3-in-anvil) for more information. 
## Download GTEx data to AnVIL using DUOS -For step-by-step instructions to access and export GTEx data to an AnVIL workspace via DUOS, see [How to access GTEx data in Terra](https://support.terra.bio/hc/en-us/articles/30873545719451-How-to-access-GTEx-data-in-Terra). +For step-by-step instructions to access and export GTEx data to an AnVIL workspace via DUOS, see [How to access GTEx data in Terra](https://support.terra.bio/hc/en-us/articles/30873545719451-How-to-access-GTEx-data-in-Terra). diff --git a/docs/learn/run-analyses-workflows.mdx b/docs/learn/run-analyses-workflows.mdx new file mode 100644 index 000000000..7e6a4603f --- /dev/null +++ b/docs/learn/run-analyses-workflows.mdx @@ -0,0 +1,18 @@ +--- +description: "Run and scale analysis workflows with Dockstore and Terra." +enableContentEnd: false +enableSupportForum: true +overview: + - label: "Terra" + links: + - "/learn/run-analyses-workflows/intro-to-terra" + - "/learn/run-analyses-workflows/using-example-workspaces" + - "/learn/run-analyses-workflows/running-gatk" + - label: "Dockstore" + links: + - "/learn/run-analyses-workflows/intro-to-dockstore" + - "/learn/run-analyses-workflows/running-galaxy-workflows-from-dockstore" +title: "Running Analyses Workflows" +--- + + diff --git a/docs/learn/introduction/intro-to-dockstore.mdx b/docs/learn/run-analyses-workflows/intro-to-dockstore.mdx similarity index 94% rename from docs/learn/introduction/intro-to-dockstore.mdx rename to docs/learn/run-analyses-workflows/intro-to-dockstore.mdx index 14b4b3f40..07b092055 100644 --- a/docs/learn/introduction/intro-to-dockstore.mdx +++ b/docs/learn/run-analyses-workflows/intro-to-dockstore.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-analyses-workflows" + text: "Running Analyses Workflows" description: "An overview of finding genomic analysis workflows in Dockstore and exporting and running them in AnVIL." 
-title: "Getting Started with Dockstore in AnVIL" +title: "Introduction to Dockstore" --- -# Introduction to Dockstore - diff --git a/docs/learn/data-analysts/rstudio-gsg-video.mdx b/docs/learn/run-interactive-analyses/rstudio-gsg-video.mdx similarity index 79% rename from docs/learn/data-analysts/rstudio-gsg-video.mdx rename to docs/learn/run-interactive-analyses/rstudio-gsg-video.mdx index d8ac38218..d35b31311 100644 --- a/docs/learn/data-analysts/rstudio-gsg-video.mdx +++ b/docs/learn/run-interactive-analyses/rstudio-gsg-video.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-interactive-analyses" + text: "Running Interactive Analyses" description: "A video introduction to using RStudio on the AnVIL platform." -title: "Starting RStudio on the AnVIL Platform" +title: "Starting RStudio" --- -# Starting RStudio - diff --git a/docs/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx b/docs/learn/run-interactive-analyses/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx similarity index 97% rename from docs/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx rename to docs/learn/run-interactive-analyses/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx index b994d4191..6f111c7cf 100644 --- a/docs/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx +++ b/docs/learn/run-interactive-analyses/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-interactive-analyses" + text: "Running Interactive Analyses" description: "An introduction to a resource, developed primarily by Aaron Lun of Genentech, Inc., that employs Bioconductor resources for many aspects of the analysis of single-cell RNA-seq data. 
The resource is a computable book written in R Markdown, published at https://bioconductor.org/books/release/OSCA/." -title: "Single-cell RNASeq with 'Orchestrating Single Cell Analysis' in R / Bioconductor" +title: "Single-cell RNASeq Analysis" --- -# Single-cell RNASeq with 'Orchestrating Single Cell Analysis' in R / Bioconductor - Vince Carey An introduction to a resource, developed primarily by Aaron Lun of Genentech, Inc., that employs Bioconductor resources for many aspects of the analysis of single-cell RNA-seq data. The resource is a "computable book" written in R Markdown, published at [https://bioconductor.org/books/release/OSCA](https://bioconductor.org/books/release/OSCA/). @@ -211,7 +212,7 @@ Notice the colData entry label.ont. We can use this mapping of cell types to the ![Mapping Cell Types to Cell Ontology](/consortia/learn/data-analysts/analysis-mapping-cell-types.png) -This just shows the positions in a portion of the Cell Ontology, of the first 20 cell types. See [Appendix 1](/learn/data-analysts/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor#appendix-1-sketching-ontological-relationships) for details on how this is done. +This just shows the positions in a portion of the Cell Ontology, of the first 20 cell types. See [Appendix 1](/learn/run-interactive-analyses/single-cell-rnaseq-with-orchestrating-single-cell-analysis-in-r-bioconductor#appendix-1-sketching-ontological-relationships) for details on how this is done. 
To conclude the exploration, answer the following questions: diff --git a/docs/learn/data-analysts/the-r-bioconductor-anvil-package.mdx b/docs/learn/run-interactive-analyses/the-r-bioconductor-anvil-package.mdx similarity index 96% rename from docs/learn/data-analysts/the-r-bioconductor-anvil-package.mdx rename to docs/learn/run-interactive-analyses/the-r-bioconductor-anvil-package.mdx index 715a4c2ee..4b4181fa9 100644 --- a/docs/learn/data-analysts/the-r-bioconductor-anvil-package.mdx +++ b/docs/learn/run-interactive-analyses/the-r-bioconductor-anvil-package.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-interactive-analyses" + text: "Running Interactive Analyses" description: "An exploration of how workspaces provide a framework for managing data and large-scale analyses." -title: "The R / Bioconductor AnVIL package for easy access to buckets, data, and workflows, and fast package installation" +title: "The R / Bioconductor AnVIL Package" --- -# The R / Bioconductor AnVIL Package - Martin Morgan, Nitesh Turaga An exploration of how workspaces provide a framework for managing data and large-scale analyses using the HCA Optimus Pipeline and 1000G-high-coverage-2019 workspaces and R using the AnVIL package. @@ -15,7 +16,7 @@ An exploration of how workspaces provide a framework for managing data and large Notes 1. Visit the [course schedule](/events/bioconductor-popup-workshops-20210503#other-sessions) for links to the recorded session, and to other workshops in the series. - 1. The material below requires a billing account. We provide a billing account during the workshop, but if you're following along on your own see '[Next Steps](/learn/data-analysts/the-r-bioconductor-anvil-package#next-steps)' for how to create a billing account. + 1. The material below requires a billing account. 
We provide a billing account during the workshop, but if you're following along on your own see '[Next Steps](/learn/run-interactive-analyses/the-r-bioconductor-anvil-package#next-steps)' for how to create a billing account. 1. Access to the workspaces we use may require registration; please [sign up](https://forms.gle/HCY2DM2QsuxAwdhv6) with your AnVIL email address. @@ -35,7 +36,7 @@ This week we'll explore how workspaces provide a framework for managing data and ### Previously... -- Notes and recorded session: [Using R / Bioconductor in AnVIL](/learn/data-analysts/using-r-bioconductor-in-anvil) +- Notes and recorded session: [Using R / Bioconductor in AnVIL](/learn/run-interactive-analyses/using-r-bioconductor-in-anvil) ### Essential Steps diff --git a/docs/learn/data-analysts/using-anvil-for-teaching-r-bioconductor.mdx b/docs/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor.mdx similarity index 98% rename from docs/learn/data-analysts/using-anvil-for-teaching-r-bioconductor.mdx rename to docs/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor.mdx index 2dcf93880..2212b0bc7 100644 --- a/docs/learn/data-analysts/using-anvil-for-teaching-r-bioconductor.mdx +++ b/docs/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-interactive-analyses" + text: "Running Interactive Analyses" description: "A case study of using AnVIL to teach R for a Biostatistics course and provides essentials for using AnVIL for other instructional efforts." title: "Using AnVIL for Teaching R / Bioconductor" --- -# Using AnVIL for teaching R / Bioconductor - Levi Waldron A case study of using AnVIL to teach R for a Biostatistics course and provides essentials for using AnVIL for other instructional efforts. @@ -14,7 +15,7 @@ A case study of using AnVIL to teach R for a Biostatistics course and provides e Notes - 1. Doing this on your own will require a billing account. 
We provide a billing account during the workshop, but if you're following along on your own see '[Set up Billing](/learn/data-analysts/using-anvil-for-teaching-r-bioconductor#set-up-billing)' for options to create a billing account. + 1. Doing this on your own will require a billing account. We provide a billing account during the workshop, but if you're following along on your own see '[Set up Billing](/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor#set-up-billing)' for options to create a billing account. 1. Access to the workspace we use requires registration; please [sign up](https://forms.gle/HCY2DM2QsuxAwdhv6) with your AnVIL email address. @@ -113,7 +114,7 @@ Steps: This course setup is GitHub-centric and uses AnVIL/Terra only as an end-user computing environment. It pulls data from GitHub or other online resources and does not use the AnVIL/Terra workspace for anything other than providing compute. -This setup is more complicated than you probably need: for most instructors, a single course GitHub repo would probably suffice, and you probably don’t need a course Docker image. In the simplest possible setup, create a single (private or public) course repo, and skip down to the section "[Post AnVIL/Terra instructions to students](/learn/data-analysts/using-anvil-for-teaching-r-bioconductor#post-anvilterra-instructions-to-students)." +This setup is more complicated than you probably need: for most instructors, a single course GitHub repo would probably suffice, and you probably don’t need a course Docker image. In the simplest possible setup, create a single (private or public) course repo, and skip down to the section "[Post AnVIL/Terra instructions to students](/learn/run-interactive-analyses/using-anvil-for-teaching-r-bioconductor#post-anvilterra-instructions-to-students)." 
![GitHub Actions](/consortia/learn/data-analysts/using-anvil-github-actions.png) diff --git a/docs/learn/data-analysts/using-r-bioconductor-in-anvil.mdx b/docs/learn/run-interactive-analyses/using-r-bioconductor-in-anvil.mdx similarity index 97% rename from docs/learn/data-analysts/using-r-bioconductor-in-anvil.mdx rename to docs/learn/run-interactive-analyses/using-r-bioconductor-in-anvil.mdx index dd9a00017..217431545 100644 --- a/docs/learn/data-analysts/using-r-bioconductor-in-anvil.mdx +++ b/docs/learn/run-interactive-analyses/using-r-bioconductor-in-anvil.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/run-interactive-analyses" + text: "Running Interactive Analyses" description: "An introduction to the AnVIL cloud computing environment." title: "Using R / Bioconductor in AnVIL" --- -# Using R / Bioconductor in AnVIL - Martin Morgan An introduction to the AnVIL cloud computing environment. We learn how to create a Google account to use in AnVIL. We explore key concepts related to workspaces and billing projects. We explore creating a Jupyter notebooks-based cloud environment, and an RStudio cloud environment. @@ -14,7 +15,7 @@ An introduction to the AnVIL cloud computing environment. We learn how to create Notes - 1. The material below requires a billing account. We provide a billing account during the workshop, but if you're following along on your own see '[Next Steps](/learn/data-analysts/using-r-bioconductor-in-anvil#next-steps)' for how to create a billing account. + 1. The material below requires a billing account. We provide a billing account during the workshop, but if you're following along on your own see '[Next Steps](/learn/run-interactive-analyses/using-r-bioconductor-in-anvil#next-steps)' for how to create a billing account. 1. Access to the workspace we use requires registration; please [sign up](https://forms.gle/HCY2DM2QsuxAwdhv6) with your AnVIL email address. 
@@ -61,7 +62,7 @@ This week introduces the AnVIL cloud computing environment. We learn how to crea ![Customize Workspace Name](/consortia/learn/data-analysts/using-bioconductor-customize-workspace-name.png) If instead, you see a 'Billing project' that is NOT deeppilots-bioconductor, or if you see something like ![Set up Billing Message](/consortia/learn/data-analysts/using-bioconductor-billing-message.png) - then contact the workshop organizer with your AnVIL email address to be added to the deeppilots-bioconductor billing project. See the [Frequently Asked Questions](/learn/data-analysts/using-r-bioconductor-in-anvil#frequently-asked-questions), below, for more information on billing projects. + then contact the workshop organizer with your AnVIL email address to be added to the deeppilots-bioconductor billing project. See the [Frequently Asked Questions](#frequently-asked-questions), below, for more information on billing projects. - Return, via the HAMBURGER menu or by clicking on the WORKSPACES element at the top of the page, to the list of WORKSPACES available to you. You'll see your own version of the workspace. Open it. ![Open the Cloned Workspace](/consortia/learn/data-analysts/using-bioconductor-open-cloned-workspace.png) - Congratulations, you now have your own workspace associated with a billing account that allows you to perform computations in the AnVIL cloud! diff --git a/docs/learn/submit-data.mdx b/docs/learn/submit-data.mdx new file mode 100644 index 000000000..e40ff372e --- /dev/null +++ b/docs/learn/submit-data.mdx @@ -0,0 +1,21 @@ +--- +description: "View data submission guides and resources." 
+enableContentEnd: false +enableSupportForum: true +overview: + - label: "Data Submission Guide" + links: + - "/learn/data-submitters/submission-guide/data-submitters-overview" + - "/learn/data-submitters/submission-guide/data-approval-process" + - "/learn/data-submitters/submission-guide/set-up-a-data-model" + - "/learn/data-submitters/submission-guide/prepare-for-submission" + - "/learn/data-submitters/submission-guide/ingesting-data" + - "/learn/data-submitters/submission-guide/qc-data" + - label: "Data Submission Resources" + links: + - "/learn/data-submitters/resources/consortium-data-access-guidelines" + - "/learn/data-submitters/resources/anvil-data-withdrawal-procedures" +title: "Submitting Data" +--- + + diff --git a/docs/learn/watch-videos-and-tutorials.mdx b/docs/learn/watch-videos-and-tutorials.mdx new file mode 100644 index 000000000..05fcdc372 --- /dev/null +++ b/docs/learn/watch-videos-and-tutorials.mdx @@ -0,0 +1,24 @@ +--- +description: "AnVIL tutorials and videos are available on the AnVIL YouTube channel" +enableContentEnd: false +enableSupportForum: true +overview: + - label: "Video Gallery" + links: + - "/learn/watch-videos-and-tutorials/anvil-videos" + - "/learn/watch-videos-and-tutorials/terra-videos" + - "/learn/watch-videos-and-tutorials/dockstore-videos" + - "/learn/watch-videos-and-tutorials/galaxy-videos" + - "/learn/watch-videos-and-tutorials/seqr-videos" + - label: "MOOC" + links: + - "/learn/watch-videos-and-tutorials/what-is-anvil" + - "/learn/watch-videos-and-tutorials/cloud-computing" + - "/learn/watch-videos-and-tutorials/cloud-costs" + - "/learn/watch-videos-and-tutorials/use-case-gatk" + - "/learn/watch-videos-and-tutorials/use-case-gwas" + - "/learn/watch-videos-and-tutorials/use-case-eqtl" +title: "Videos & Tutorials" +--- + + diff --git a/docs/learn/videos/anvil-videos.mdx b/docs/learn/watch-videos-and-tutorials/anvil-videos.mdx similarity index 97% rename from docs/learn/videos/anvil-videos.mdx rename to 
docs/learn/watch-videos-and-tutorials/anvil-videos.mdx index dd99d8def..fc7a2e24f 100644 --- a/docs/learn/videos/anvil-videos.mdx +++ b/docs/learn/watch-videos-and-tutorials/anvil-videos.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/watch-videos-and-tutorials" + text: "Videos & Tutorials" description: "Selected videos from the AnVIL YouTube channel." title: "AnVIL Videos" --- -# AnVIL Videos - Selected videos from the [AnVIL YouTube Channel](https://www.youtube.com/channel/UCBbHCj7kUogAMFyBAzzzfUw) are listed below. ## AnVIL Shorts diff --git a/docs/learn/anvil-mooc/cloud-computing.mdx b/docs/learn/watch-videos-and-tutorials/cloud-computing.mdx similarity index 80% rename from docs/learn/anvil-mooc/cloud-computing.mdx rename to docs/learn/watch-videos-and-tutorials/cloud-computing.mdx index da6ecdc24..fafad73c6 100644 --- a/docs/learn/anvil-mooc/cloud-computing.mdx +++ b/docs/learn/watch-videos-and-tutorials/cloud-computing.mdx @@ -1,10 +1,11 @@ --- +breadcrumbs: + - path: "/learn/watch-videos-and-tutorials" + text: "Videos & Tutorials" description: "An overview of different cloud computing platforms and the state of cloud computing." -title: "AnVIL MOOC - Cloud Computing" +title: "Cloud Computing" --- -# Cloud Computing -