fix: CQDG-00 fix bug with previous PR
adipaul1981 committed Oct 17, 2024
1 parent ea94ec3 commit b8fc691
Showing 4 changed files with 21 additions and 13 deletions.
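
Taken together, the four diffs below make the same kind of adjustment to each job's extract: the previous PR chained .where(col("security") =!= "R") onto every dataset in the shared .map, and this commit narrows it so the filter only touches the dataset that actually carries a security column. Concretely, BiospecimenCentric keeps it only on normalized_biospecimen, FileCentric only on normalized_drs_document_reference, ParticipantCentric drops it altogether, and SimpleParticipant adds it for normalized_patient; every other dataset keeps just the study_id filter. Filtering on a column a DataFrame does not have fails at analysis time in Spark, which is presumably the bug the commit title refers to. Below is a minimal, self-contained sketch of the corrected pattern; the dataset and column names are illustrative stand-ins, not the project's DatasetConf API.

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.col

// Sketch only: the securable dataset gets the security filter,
// every other dataset gets just the study filter.
object SecurityFilterSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate()
  import spark.implicits._

  val studyIds = List("ST0001", "ST0002")

  // Stand-in for a dataset that carries a security column.
  val normalizedBiospecimen = Seq(
    ("BS1", "ST0001", "U"), // unrestricted: kept
    ("BS2", "ST0001", "R"), // restricted: dropped by the security filter
    ("BS3", "ST0003", "U")  // wrong study: dropped by the study filter
  ).toDF("biospecimen_id", "study_id", "security")

  // Stand-in for a dataset without a security column.
  val simpleParticipant = Seq(("P1", "ST0001"), ("P2", "ST0003"))
    .toDF("participant_id", "study_id")

  val data: Map[String, DataFrame] = Map(
    "simple_participant" -> simpleParticipant
      .where(col("study_id").isin(studyIds: _*)),
    "normalized_biospecimen" -> normalizedBiospecimen
      .where(col("study_id").isin(studyIds: _*))
      .where(col("security") =!= "R")
  )

  data("normalized_biospecimen").show() // only BS1 remains
  spark.stop()
}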

@@ -24,12 +24,16 @@ class BiospecimenCentric(studyIds: List[String])(implicit configuration: Configu
   override def extract(lastRunDateTime: LocalDateTime = minDateTime,
                        currentRunDateTime: LocalDateTime = LocalDateTime.now())(implicit spark: SparkSession): Map[String, DataFrame] = {
 
-    (Seq(normalized_biospecimen, normalized_drs_document_reference, simple_participant, es_index_study_centric, normalized_sequencing_experiment, normalized_sample_registration, es_index_file_centric)
+    (Seq(normalized_drs_document_reference, simple_participant, es_index_study_centric,
+      normalized_sequencing_experiment, normalized_sample_registration, es_index_file_centric)
       .map(ds => ds.id ->
-        ds.read
-          .where(col("study_id").isin(studyIds: _*))
-          .where(col("security") =!= "R")
-      ) ++ Seq(ncit_terms.id -> ncit_terms.read)).toMap
+        ds.read.where(col("study_id").isin(studyIds: _*))) ++
+      Seq(
+        ncit_terms.id -> ncit_terms.read,
+        normalized_biospecimen.id -> normalized_biospecimen.read
+          .where(col("study_id").isin(studyIds: _*))
+          .where(col("security") =!= "R")
+      )).toMap
   }
 
   override def transform(data: Map[String, DataFrame],

@@ -22,12 +22,15 @@ class FileCentric(studyIds: List[String])(implicit configuration: Configuration)
 
   override def extract(lastRunDateTime: LocalDateTime = minDateTime,
                        currentRunDateTime: LocalDateTime = LocalDateTime.now())(implicit spark: SparkSession): Map[String, DataFrame] = {
-    (Seq(normalized_drs_document_reference, normalized_biospecimen, simple_participant, es_index_study_centric, normalized_sequencing_experiment, normalized_sample_registration)
-      .map(ds => ds.id ->
-        ds.read
-          .where(col("study_id").isin(studyIds: _*))
-          .where(col("security") =!= "R")
-      ) ++ Seq(ncit_terms.id -> ncit_terms.read)).toMap
+    (Seq(normalized_biospecimen, simple_participant, es_index_study_centric,
+      normalized_sequencing_experiment, normalized_sample_registration)
+      .map(ds => ds.id -> ds.read.where(col("study_id").isin(studyIds: _*))) ++
+      Seq(
+        ncit_terms.id -> ncit_terms.read,
+        normalized_drs_document_reference.id -> normalized_drs_document_reference.read
+          .where(col("study_id").isin(studyIds: _*))
+          .where(col("security") =!= "R")
+      )).toMap
   }
 
   override def transform(data: Map[String, DataFrame],

@@ -22,12 +22,10 @@ class ParticipantCentric(studyIds: List[String])(implicit configuration: Configu
 
   override def extract(lastRunDateTime: LocalDateTime = minDateTime,
                        currentRunDateTime: LocalDateTime = LocalDateTime.now())(implicit spark: SparkSession): Map[String, DataFrame] = {
-    (Seq(simple_participant, normalized_drs_document_reference, normalized_biospecimen, normalized_sequencing_experiment, normalized_sample_registration, es_index_study_centric)
-      .map(ds => ds.id ->
-        ds.read
-          .where(col("study_id").isin(studyIds: _*))
-          .where(col("security") =!= "R")
-      ) ++ Seq(ncit_terms.id -> ncit_terms.read)).toMap
+    (Seq(simple_participant, normalized_drs_document_reference, normalized_biospecimen, normalized_sequencing_experiment,
+      normalized_sample_registration, es_index_study_centric)
+      .map(ds => ds.id -> ds.read.where(col("study_id").isin(studyIds: _*))) ++
+      Seq(ncit_terms.id -> ncit_terms.read)).toMap
   }
 
   override def transform(data: Map[String, DataFrame],

@@ -33,6 +33,9 @@ class SimpleParticipant(studyIds: List[String])(implicit configuration: Configur
       normalized_group, normalized_family_relationship, normalized_researchstudy)
       .map(ds => ds.id -> ds.read.where(col("study_id").isin(studyIds: _*))
       ) ++ Seq(
+        normalized_patient.id -> normalized_patient.read
+          .where(col("study_id").isin(studyIds: _*))
+          .where(col("security") =!= "R"),
         hpo_terms.id -> hpo_terms.read,
         mondo_terms.id -> mondo_terms.read,
         icd_terms.id -> icd_terms.read,
