Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Try to rework unique validator to make it possible to validate count … #39

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,43 @@ public void testFail3rdCommandWhenDuplicationIsWithinTheBulk() {
assertThat(failedCommands(results), contains(commands.get(2)));
}

@Test
public void testSuccessCommandWhenDuplicationIsWithinTheBulk() {
    // Two of the three commands share NAME "moshe", but maxCount=2 allows up
    // to two entities per unique key value, so the whole bulk must succeed.
    final UniqueKey<ParentEntity> nameKey = new UniqueKey<>(asList(ParentEntity.NAME));
    final UniquenessValidator<ParentEntity> uniquenessValidator =
            new UniquenessValidator.Builder<>(entitiesFetcher, nameKey)
                    .setMaxCount(2)
                    .build();

    final List<CreateParent> bulk = asList(
            new CreateParent().with(ParentEntity.ID, 1).with(ParentEntity.NAME, "moshe"),
            new CreateParent().with(ParentEntity.ID, 2).with(ParentEntity.NAME, "david"),
            new CreateParent().with(ParentEntity.ID, 3).with(ParentEntity.NAME, "moshe"));

    final CreateResult<ParentEntity, Identifier<ParentEntity>> result =
            parentPersistence.create(bulk, parentFlow(uniquenessValidator).build());

    assertFalse(result.hasErrors());
}

@Test
public void testSuccessCommandWhenKeyAlreadyExistsInDb() {
    // One "moshe" already exists in the DB and one more arrives in the bulk:
    // total 2, which maxCount=2 permits, so no errors are expected.
    final UniqueKey<ParentEntity> nameKey = new UniqueKey<>(asList(ParentEntity.NAME));
    final UniquenessValidator<ParentEntity> uniquenessValidator =
            new UniquenessValidator.Builder<>(entitiesFetcher, nameKey)
                    .setMaxCount(2)
                    .build();

    // Seed the DB with an existing entity holding the contested key value.
    create(new CreateParent().with(ParentEntity.ID, 99).with(ParentEntity.NAME, "moshe"));

    final List<CreateParent> bulk = asList(
            new CreateParent().with(ParentEntity.ID, 1).with(ParentEntity.NAME, "moshe"),
            new CreateParent().with(ParentEntity.ID, 2).with(ParentEntity.NAME, "david"));

    final CreateResult<ParentEntity, Identifier<ParentEntity>> result =
            parentPersistence.create(bulk, parentFlow(uniquenessValidator).build());

    assertFalse(result.hasErrors());
}

@Test
public void testFailCommandWhenKeyAlreadyExistsInDb() {
UniqueKey<ParentEntity> uniqueness = new UniqueKey<>(asList(ParentEntity.NAME));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.BinaryOperator;
import java.util.function.Predicate;
Expand All @@ -18,8 +19,9 @@
import static com.kenshoo.pl.entity.ChangeOperation.UPDATE;
import static com.kenshoo.pl.entity.SupportedChangeOperation.CREATE;
import static java.util.Objects.requireNonNull;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.groupingBy;
import static java.util.stream.Collectors.toList;
import static org.jooq.lambda.Seq.seq;


public class UniquenessValidator<E extends EntityType<E>> implements ChangesValidator<E> {
Expand All @@ -29,47 +31,61 @@ public class UniquenessValidator<E extends EntityType<E>> implements ChangesVali
// Fields whose combined value is checked for (near-)uniqueness.
private final UniqueKey<E> uniqueKey;
// Restricts which entities/commands participate in the check.
private final PLCondition condition;
// Change operation(s) this validator applies to (Builder default: CREATE).
private final SupportedChangeOperation operation;
// Maximum number of entities allowed to share one unique key value
// (Builder default: 1, i.e. strictly unique).
private final int maxCount;

/**
 * Creates the validator. Private: instances are built via {@link Builder}.
 *
 * @param fetcher   fetches current entities from storage for conflict detection
 * @param uniqueKey the fields whose combined value is checked for uniqueness
 * @param operation change operation(s) this validator applies to
 * @param condition restricts which entities participate in the check
 * @param errorCode error code attached to validation errors; nullability
 *                  handled by the Builder — TODO confirm default
 * @param maxCount  maximum number of entities allowed per unique key value
 */
private UniquenessValidator(EntitiesFetcher fetcher, UniqueKey<E> uniqueKey, SupportedChangeOperation operation, PLCondition condition, String errorCode, int maxCount) {
    this.errorCode = errorCode;
    this.fetcher = requireNonNull(fetcher, "entity fetcher must be provided");
    this.uniqueKey = requireNonNull(uniqueKey, "unique key must be provided");
    this.condition = requireNonNull(condition, "condition must be provided");
    this.operation = requireNonNull(operation, "operation must be provided");
    this.maxCount = maxCount;
}

@Override
public SupportedChangeOperation getSupportedChangeOperation() {
    // Configurable via Builder#setOperation; defaults to CREATE.
    return operation;
}


@Override
public void validate(Collection<? extends EntityChange<E>> commands, ChangeOperation op, ChangeContext ctx) {

final var commandsToValidate = commands.stream().filter(hasChangeInUniqueKeyField()).collect(Collectors.toList());
Map<Identifier<E>, ? extends EntityChange<E>> commandsByIds = markDuplicatesInCollectionWithErrors(commandsToValidate, ctx);
if (commandsByIds.isEmpty())
var groupedCommands = groupByUniqueKey(commandsToValidate, ctx);

groupedCommands.forEach((eIdentifier, entityChanges) ->
entityChanges.stream()
.skip(maxCount)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

skip is a clever idea :)
but lets not add errors yet because we also add them later, and we don't need to add the same error twice.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

i think we should delete it from list, like it was before. As i understand in existed logic on first stage we check duplicates in commands, remove it and that compare with db what's left

.forEach(eEntityChange -> ctx.addValidationError(eEntityChange, new ValidationError(errorCode))));
if (groupedCommands.isEmpty())
return;

UniqueKey<E> pk = uniqueKey.getEntityType().getPrimaryKey();
EntityField<E, ?>[] uniqueKeyAndPK = ArrayUtils.addAll(uniqueKey.getFields(), pk.getFields());

Map<Identifier<E>, CurrentEntityState> duplicates = fetcher.fetch(uniqueKey.getEntityType(), commandsByIds.keySet(), condition, uniqueKeyAndPK)
Map<Identifier<E>, List<CurrentEntityState>> duplicates = fetcher.fetch(uniqueKey.getEntityType(), groupedCommands.keySet(), condition, uniqueKeyAndPK)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

"duplicates" is not a good name anymore.
we can call them "itemsInDB"

.stream()
.collect(toMap(e -> createKeyValue(e, uniqueKey), identity()));

duplicates.forEach((dupKey, dupEntity) -> ctx.addValidationError(commandsByIds.get(dupKey), errorForDatabaseConflict(dupEntity, pk)));
.collect(groupingBy(e -> createKeyValue(e, uniqueKey), toList()));

duplicates.forEach((eIdentifier, currentEntityStates) -> {
List<? extends EntityChange<E>> identifierCommands = groupedCommands.get(eIdentifier);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we can delete the "in same bulk" detection (line 57) and have a single check here:

groupedCommands.forEach( (group, cmds) -> {
    int maxAllowedInBulk = maxCount - itemsFromDB.get(group).size() ;
    // now do the "skip" trick with maxAllowedInBulk
};

Copy link
Author

@AUsachov AUsachov Jun 23, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we'll get wrong error messages. I don't know is it critical, but when we have duplicate in db existed code should pass db id of duplicate. So here we should iterate over itemsFromDB

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

One more interesting question - what should be in error message when we have 3 items in db, maxCount is 3 and few items in commands list?

if (currentEntityStates.size() + identifierCommands.size() > maxCount) {
currentEntityStates.forEach(currentEntityState -> {
identifierCommands.forEach(eEntityChange -> ctx.addValidationError(eEntityChange, errorForDatabaseConflict(currentEntityState, pk)));
});
}
});
}

// True when the command is relevant for uniqueness checking: non-UPDATE
// commands always are; UPDATE commands only if they change a key field.
private Predicate<EntityChange<E>> hasChangeInUniqueKeyField() {
    return command -> {
        if (!UPDATE.equals(command.getChangeOperation())) {
            return true;
        }
        return Arrays.stream(uniqueKey.getFields()).anyMatch(command::isFieldChanged);
    };
}

private Map<Identifier<E>, EntityChange<E>> markDuplicatesInCollectionWithErrors(Collection<? extends EntityChange<E>> commands, ChangeContext ctx) {
return commands
.stream()
private <CMD extends EntityChange<E>> Map<Identifier<E>, List<CMD>> groupByUniqueKey(Collection<CMD> commands, ChangeContext ctx) {
return seq(commands)
.filter(command -> condition.getPostFetchCondition().test(ctx.getFinalEntity(command)))
.collect(toMap(cmd -> createKeyValue(cmd, ctx, uniqueKey), identity(), fail2ndConflictingCommand(ctx)));
.groupBy(cmd -> uniqueKey.createIdentifier(new EntityWithNullForMissingField(ctx.getFinalEntity(cmd))));
}

private ValidationError errorForDatabaseConflict(CurrentEntityState dupEntity, UniqueKey<E> pk) {
Expand Down Expand Up @@ -103,12 +119,18 @@ public static class Builder<E extends EntityType<E>> {
private final UniqueKey<E> uniqueKey;
// By default the check applies to all entities, on CREATE only.
private PLCondition condition = PLCondition.trueCondition();
private SupportedChangeOperation operation = CREATE;
// Default 1: a key value must be strictly unique.
private int maxCount = 1;

public Builder(EntitiesFetcher fetcher, UniqueKey<E> uniqueKey) {
    // Null-checks are deferred to the UniquenessValidator constructor.
    this.fetcher = fetcher;
    this.uniqueKey = uniqueKey;
}

/**
 * Sets the maximum number of entities allowed to share the same unique key
 * value (default 1, i.e. strictly unique).
 *
 * @throws IllegalArgumentException if {@code maxCount} is less than 1
 */
public Builder<E> setMaxCount(int maxCount) {
    // Fail fast: a negative count would make Stream.skip(maxCount) throw at
    // validation time, and zero would reject every command unconditionally.
    if (maxCount < 1) {
        throw new IllegalArgumentException("maxCount must be at least 1 but was " + maxCount);
    }
    this.maxCount = maxCount;
    return this;
}

public Builder<E> setOperation(SupportedChangeOperation operation) {
this.operation = operation;
return this;
Expand All @@ -125,7 +147,7 @@ public Builder<E> setErrorCode(String errorCode) {
}

// Builds the immutable validator with all configured (or defaulted) settings.
public UniquenessValidator<E> build() {
    return new UniquenessValidator<>(fetcher, uniqueKey, operation, condition, errorCode, maxCount);
}
}

Expand Down