Update to new ODF schema
sergiimk committed Aug 22, 2020
1 parent f672311 commit 81ac055
Showing 8 changed files with 22 additions and 19 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.23.0] - 2020-08-22
+### Changed
+- Updated to latest ODF schema
+
 ## [0.22.0] - 2020-07-15
 ### Added
 - `#26`: Follow redirects when fetching data from URL
@@ -76,9 +76,12 @@ class AddInteractiveCommand(
       "Compression",
       "What's the compression format?",
       Seq("zip", "gzip")
-    )
+    ) match {
+      case "zip" => CompressionFormat.Zip
+      case "gzip" => CompressionFormat.Gzip
+    }
 
-    val subPath = if (Seq("zip").contains(compression)) {
+    val subPath = if (compression == CompressionFormat.Zip) {
       inputOptional(
         "Sub-path",
         "If this archive can contain multiple files - specify the path regex to " +
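The interactive answer is now converted from a raw string into the schema's CompressionFormat before use. As a point of reference, a minimal sketch of how such a type could look, assuming the new ODF manifests model it as a sealed trait with case objects; the real definition lives in dev.kamu.core.manifests and may differ:

// Hypothetical sketch, not the actual dev.kamu.core.manifests definition
sealed trait CompressionFormat

object CompressionFormat {
  case object Zip extends CompressionFormat
  case object Gzip extends CompressionFormat

  // Mapping the free-form prompt answer onto the enum-like type
  // (other answers would throw a MatchError; the prompt restricts input to these two)
  def fromAnswer(answer: String): CompressionFormat = answer match {
    case "zip"  => Zip
    case "gzip" => Gzip
  }
}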
@@ -30,7 +30,7 @@ class DependencyGraphCommand(
 
     datasets.foreach(
       ds =>
-        ds.datasetDependencies
+        ds.dependencies
          .foreach(d => println(s"${quote(d)} -> ${quote(ds.id)};"))
     )
 
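The command emits one Graphviz DOT edge per dependency, pointing from dependency to dependent. A self-contained sketch of the same pattern, with a simplified quote helper and made-up dataset ids rather than anything from the repository:

// Simplified stand-in for DependencyGraphCommand's edge printing; ids are invented
object DotEdgesDemo extends App {
  case class Dataset(id: String, dependencies: Set[String])

  def quote(s: String): String = "\"" + s + "\""

  val datasets = Seq(
    Dataset("root.dataset", Set.empty),
    Dataset("derived.dataset", Set("root.dataset"))
  )

  // One DOT edge per dependency: dependency -> dependent
  datasets.foreach(ds =>
    ds.dependencies.foreach(d => println(s"${quote(d)} -> ${quote(ds.id)};"))
  )
  // Prints: "root.dataset" -> "derived.dataset";
}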
@@ -103,14 +103,14 @@ class SourceFactory(systemClock: Clock) {
       .toVector
 
     val orderBy = kind.order.getOrElse(
-      if (kind.eventTime.isDefined) SourceOrdering.ByEventTime()
-      else SourceOrdering.ByName()
+      if (kind.eventTime.isDefined) SourceOrdering.ByEventTime
+      else SourceOrdering.ByName
     )
 
     val sorted = orderBy match {
-      case SourceOrdering.ByName() =>
+      case SourceOrdering.ByName =>
         globbed.sortBy(_.path.getFileName.toString.toLowerCase())
-      case SourceOrdering.ByEventTime() =>
+      case SourceOrdering.ByEventTime =>
         globbed.sortBy(eventTimeSource.getEventTime)
     }
 
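Dropping the parentheses on ByEventTime and ByName suggests these variants changed from empty case classes to case objects in the updated schema. A hypothetical before/after sketch, for illustration only; the real SourceOrdering comes from dev.kamu.core.manifests:

// Old shape (empty case classes, matched as ByName() / ByEventTime()):
// sealed trait SourceOrdering
// object SourceOrdering {
//   case class ByName() extends SourceOrdering
//   case class ByEventTime() extends SourceOrdering
// }

// Assumed new shape (case objects, matched without parentheses):
sealed trait SourceOrdering
object SourceOrdering {
  case object ByName extends SourceOrdering
  case object ByEventTime extends SourceOrdering
}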
@@ -8,7 +8,7 @@
 
 package dev.kamu.cli.ingest.prep
 
-import dev.kamu.core.manifests.{PrepStep => PrepStepCfg}
+import dev.kamu.core.manifests.{PrepStep => PrepStepCfg, CompressionFormat}
 import org.apache.logging.log4j.LogManager
 
 class PrepStepFactory() {
@@ -19,17 +19,13 @@ class PrepStepFactory() {
   ): PrepStep = {
     config match {
       case dc: PrepStepCfg.Decompress =>
-        dc.format.toLowerCase match {
-          case "gzip" =>
+        dc.format match {
+          case CompressionFormat.Gzip =>
             logger.debug("Extracting gzip")
             new DecompressGZIPStep()
-          case "zip" =>
+          case CompressionFormat.Zip =>
             logger.debug("Extracting zip")
             new DecompressZIPStep(dc)
-          case _ =>
-            throw new NotImplementedError(
-              s"Unknown compression format: ${dc.format}"
-            )
         }
       case pipe: PrepStepCfg.Pipe =>
         new ProcessPipeStep(pipe.command)
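With the match now dispatching on the schema's CompressionFormat instead of arbitrary strings, the catch-all case _ => throw new NotImplementedError(...) can be dropped: a match over a sealed hierarchy lets the compiler report unhandled variants. A toy illustration with hypothetical names, unrelated to the kamu codebase:

sealed trait Format
case object Gz extends Format
case object Zip extends Format

object ExhaustivenessDemo {
  // Because Format is sealed, deleting either case below produces a
  // "match may not be exhaustive" compiler warning rather than a silent
  // runtime failure, which is what the removed NotImplementedError guarded against.
  def describe(f: Format): String = f match {
    case Gz  => "gzip"
    case Zip => "zip"
  }
}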
@@ -37,7 +37,7 @@ class MetadataChain(datasetDir: Path) {
     val initialSummary = DatasetSummary(
       id = ds.id,
       kind = ds.kind,
-      datasetDependencies = ds.dependsOn.toSet,
+      dependencies = ds.dependsOn.toSet,
       vocab = ds.vocab,
       lastPulled = None,
       numRecords = 0,
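Here the summary field is renamed from datasetDependencies to dependencies, and the call sites in MetadataRepository below follow suit. A rough sketch of the affected slice of DatasetSummary, with field types assumed and unrelated fields elided; the real class comes from the ODF schema in dev.kamu.core.manifests:

// Hypothetical, trimmed-down sketch, not the real DatasetSummary
case class DatasetSummary(
  id: String,
  kind: String,
  dependencies: Set[String], // previously named `datasetDependencies`
  lastPulled: Option[java.time.Instant],
  numRecords: Long
)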
@@ -116,7 +116,7 @@ class MetadataRepository(
     if (isRemote(id))
       List.empty
     else
-      getDatasetSummary(id).datasetDependencies.toList
+      getDatasetSummary(id).dependencies.toList
   }
 
   def getDatasetsInDependencyOrder(
@@ -212,7 +212,7 @@
     // Validate references
     val referencedBy = getAllDatasets()
       .map(getDatasetSummary)
-      .filter(_.datasetDependencies.contains(id))
+      .filter(_.dependencies.contains(id))
 
     if (referencedBy.nonEmpty)
       throw new DanglingReferenceException(referencedBy.map(_.id), id)
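The second hunk is the delete-time guard: a dataset that still appears in another summary's dependencies cannot be removed. A sketch of that check with simplified types and a stand-in exception, purely to illustrate the pattern; the real code uses DatasetSummary and DanglingReferenceException from the kamu codebase:

object DanglingReferenceDemo {
  final case class DanglingRef(referencedBy: Seq[String], id: String)
      extends RuntimeException(
        s"Dataset $id is still referenced by: ${referencedBy.mkString(", ")}"
      )

  // summaries: (dataset id, ids it depends on)
  def ensureNotReferenced(summaries: Seq[(String, Set[String])], id: String): Unit = {
    val referencedBy = summaries.collect {
      case (otherId, deps) if deps.contains(id) => otherId
    }
    if (referencedBy.nonEmpty) throw DanglingRef(referencedBy, id)
  }
}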
