Skip to content

Commit

Permalink
issue #1159: code restyle
Browse files Browse the repository at this point in the history
  • Loading branch information
michele-bastianelli-smc authored and mrk-vi committed Jan 13, 2025
1 parent 996bf66 commit 04ae26d
Showing 1 changed file with 132 additions and 137 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -44,15 +44,92 @@
import java.util.List;
import java.util.Set;

;

@ApplicationScoped
public class AnalyzerService extends BaseK9EntityService<Analyzer, AnalyzerDTO> {
@Inject
CharFilterService _charFilterService;
@Inject
TokenFilterService _tokenFilterService;
@Inject
TokenizerService _tokenizerService;

AnalyzerService(AnalyzerMapper mapper) {
this.mapper = mapper;
}

public Uni<List<Analyzer>> findUnboundAnalyzersByTokenFilter(long tokenFilterId) {
/**
 * Associates the {@link CharFilter} identified by {@code charFilterId} with the
 * analyzer identified by {@code id}, persisting the updated association.
 *
 * <p>Runs inside a single reactive transaction. If the analyzer or the char filter
 * cannot be found, or the filter is already associated, a {@code null} item is
 * emitted instead of a tuple.
 *
 * @param id           primary key of the analyzer to update
 * @param charFilterId primary key of the char filter to attach
 * @return a {@link Uni} emitting the persisted analyzer paired with the char
 *         filter, or {@code null} when nothing was changed
 */
public Uni<Tuple2<Analyzer, CharFilter>> addCharFilterToAnalyzer(long id, long charFilterId) {

	return sessionFactory.withTransaction((session, tx) -> findById(session, id)
		.onItem()
		.ifNotNull()
		.transformToUni(analyzer -> _charFilterService.findById(session, charFilterId)
			.onItem()
			.ifNotNull()
			.transformToUni(charFilter -> session.fetch(analyzer.getCharFilters())
				.onItem()
				.ifNotNull()
				.transformToUni(fetchedFilters -> {
					// Set semantics: add() is false when the filter is already bound.
					if (!fetchedFilters.add(charFilter)) {
						return Uni.createFrom().nullItem();
					}
					analyzer.setCharFilters(fetchedFilters);
					return persist(session, analyzer)
						.map(saved -> Tuple2.of(saved, charFilter));
				}))));
}

/**
 * Associates the {@link TokenFilter} identified by {@code tokenFilterId} with the
 * analyzer identified by {@code id}, persisting the updated association.
 *
 * <p>Runs inside a single reactive transaction. If the analyzer or the token filter
 * cannot be found, or the filter is already associated, a {@code null} item is
 * emitted instead of a tuple.
 *
 * @param id            primary key of the analyzer to update
 * @param tokenFilterId primary key of the token filter to attach
 * @return a {@link Uni} emitting the persisted analyzer paired with the token
 *         filter, or {@code null} when nothing was changed
 */
public Uni<Tuple2<Analyzer, TokenFilter>> addTokenFilterToAnalyzer(
	long id, long tokenFilterId) {

	return sessionFactory.withTransaction((session, tx) -> findById(session, id)
		.onItem()
		.ifNotNull()
		.transformToUni(analyzer -> _tokenFilterService.findById(session, tokenFilterId)
			.onItem()
			.ifNotNull()
			.transformToUni(tokenFilter -> session.fetch(analyzer.getTokenFilters())
				.onItem()
				.ifNotNull()
				.transformToUni(fetchedFilters -> {
					// Set semantics: add() is false when the filter is already bound.
					if (!fetchedFilters.add(tokenFilter)) {
						return Uni.createFrom().nullItem();
					}
					analyzer.setTokenFilters(fetchedFilters);
					return persist(session, analyzer)
						.map(saved -> Tuple2.of(saved, tokenFilter));
				}))));
}

/**
 * Binds the tokenizer identified by {@code tokenizerId} to the analyzer identified
 * by {@code analyzerId} and persists the change, all within one transaction.
 *
 * @param analyzerId  primary key of the analyzer to update
 * @param tokenizerId primary key of the tokenizer to bind
 * @return a {@link Uni} emitting the persisted analyzer paired with the tokenizer,
 *         or {@code null} when either entity is not found
 */
public Uni<Tuple2<Analyzer, Tokenizer>> bindTokenizer(long analyzerId, long tokenizerId) {
	return sessionFactory.withTransaction((session, tx) -> findById(session, analyzerId)
		.onItem()
		.ifNotNull()
		.transformToUni(analyzer -> _tokenizerService.findById(session, tokenizerId)
			.onItem()
			.ifNotNull()
			.transformToUni(tokenizer -> {
				analyzer.setTokenizer(tokenizer);
				return persist(session, analyzer)
					.map(persisted -> Tuple2.of(persisted, tokenizer));
			})));
}

public Uni<List<Analyzer>> findUnboundAnalyzersByCharFilter(long charFilterId) {
return sessionFactory.withTransaction(s -> {
CriteriaBuilder cb = sessionFactory.getCriteriaBuilder();

Expand All @@ -64,12 +141,12 @@ public Uni<List<Analyzer>> findUnboundAnalyzersByTokenFilter(long tokenFilterId)
Subquery<Long> idsToExcludeQuery = criteriaQuery.subquery(Long.class);
Root<Analyzer> rootAnalyzerToExclude = idsToExcludeQuery.from(Analyzer.class);

Join<Analyzer, TokenFilter> tokenFilterJoinToExclude =
rootAnalyzerToExclude.join(Analyzer_.tokenFilters, JoinType.INNER);
Join<Analyzer, CharFilter> charFilterJoinToExclude =
rootAnalyzerToExclude.join(Analyzer_.charFilters, JoinType.INNER);

idsToExcludeQuery
.select(rootAnalyzerToExclude.get(Analyzer_.id))
.where(cb.equal(tokenFilterJoinToExclude.get(TokenFilter_.id), tokenFilterId));
.where(cb.equal(charFilterJoinToExclude.get(CharFilter_.id), charFilterId));

criteriaQuery.where(
cb.not(rootAnalyzer.get(Analyzer_.id).in(idsToExcludeQuery)));
Expand All @@ -78,7 +155,7 @@ public Uni<List<Analyzer>> findUnboundAnalyzersByTokenFilter(long tokenFilterId)
});
}

public Uni<List<Analyzer>> findUnboundAnalyzersByCharFilter(long charFilterId) {
public Uni<List<Analyzer>> findUnboundAnalyzersByTokenFilter(long tokenFilterId) {
return sessionFactory.withTransaction(s -> {
CriteriaBuilder cb = sessionFactory.getCriteriaBuilder();

Expand All @@ -90,12 +167,12 @@ public Uni<List<Analyzer>> findUnboundAnalyzersByCharFilter(long charFilterId) {
Subquery<Long> idsToExcludeQuery = criteriaQuery.subquery(Long.class);
Root<Analyzer> rootAnalyzerToExclude = idsToExcludeQuery.from(Analyzer.class);

Join<Analyzer, CharFilter> charFilterJoinToExclude =
rootAnalyzerToExclude.join(Analyzer_.charFilters, JoinType.INNER);
Join<Analyzer, TokenFilter> tokenFilterJoinToExclude =
rootAnalyzerToExclude.join(Analyzer_.tokenFilters, JoinType.INNER);

idsToExcludeQuery
.select(rootAnalyzerToExclude.get(Analyzer_.id))
.where(cb.equal(charFilterJoinToExclude.get(CharFilter_.id), charFilterId));
.where(cb.equal(tokenFilterJoinToExclude.get(TokenFilter_.id), tokenFilterId));

criteriaQuery.where(
cb.not(rootAnalyzer.get(Analyzer_.id).in(idsToExcludeQuery)));
Expand All @@ -104,9 +181,18 @@ public Uni<List<Analyzer>> findUnboundAnalyzersByCharFilter(long charFilterId) {
});
}

@Override
public Class<Analyzer> getEntityClass() {return Analyzer.class;} ;
/**
 * Pages the {@link CharFilter}s joined to the analyzer identified by {@code id},
 * returning them as a cursor-based {@link Connection}.
 *
 * <p>Delegates to {@code findJoinConnection}; the searchable fields come from the
 * char-filter service so text filtering matches that entity's own queries.
 *
 * @param id         analyzer primary key
 * @param after      cursor: only elements after this position
 * @param before     cursor: only elements before this position
 * @param first      maximum number of elements from the start
 * @param last       maximum number of elements from the end
 * @param searchText free-text filter applied to the char filter's search fields
 * @param sortByList sort criteria
 * @param notEqual   when {@code true}, inverts the join match
 * @return a {@link Uni} emitting the requested page of char filters
 */
public Uni<Connection<CharFilter>> getCharFilters(
	Long id, String after, String before, Integer first, Integer last,
	String searchText, Set<SortBy> sortByList, boolean notEqual) {

	String[] searchFields = _charFilterService.getSearchFields();

	return findJoinConnection(
		id, Analyzer_.CHAR_FILTERS, CharFilter.class, searchFields,
		after, before, first, last, searchText, sortByList, notEqual);
}

/** Entity type managed by this service; used by the generic base-service plumbing. */
@Override
public Class<Analyzer> getEntityClass() {
	return Analyzer.class;
}

@Override
public String[] getSearchFields() {
Expand All @@ -123,63 +209,41 @@ public Uni<Connection<TokenFilter>> getTokenFilters(
last, searchText, sortByList, notEqual);
}

/**
 * Pages the {@link CharFilter}s joined to the analyzer identified by {@code id}
 * as a cursor-based {@link Connection}, delegating to {@code findJoinConnection}.
 *
 * @param id         analyzer primary key
 * @param after      cursor: only elements after this position
 * @param before     cursor: only elements before this position
 * @param first      maximum number of elements from the start
 * @param last       maximum number of elements from the end
 * @param searchText free-text filter applied to the char filter's search fields
 * @param sortByList sort criteria
 * @param notEqual   when {@code true}, inverts the join match
 * @return a {@link Uni} emitting the requested page of char filters
 */
public Uni<Connection<CharFilter>> getCharFilters(
	Long id, String after, String before, Integer first, Integer last,
	String searchText, Set<SortBy> sortByList, boolean notEqual) {

	// Search fields are owned by the char-filter service so filtering stays
	// consistent with that entity's own listing queries.
	return findJoinConnection(
		id, Analyzer_.CHAR_FILTERS, CharFilter.class,
		_charFilterService.getSearchFields(),
		after, before, first, last, searchText, sortByList, notEqual);
}

/**
 * Loads the tokenizer bound to the analyzer with the given id, forcing
 * initialization of the lazy association via {@code session.fetch}.
 *
 * @param analyzerId analyzer primary key
 * @return a {@link Uni} emitting the analyzer's tokenizer
 */
public Uni<Tokenizer> getTokenizer(long analyzerId) {
	return sessionFactory.withTransaction(session ->
		findById(session, analyzerId)
			// chain(...) is Mutiny's alias for flatMap: sequence the lazy fetch
			.chain(analyzer -> session.fetch(analyzer.getTokenizer())));
}

public Uni<Tuple2<Analyzer, TokenFilter>> addTokenFilterToAnalyzer(
long id, long tokenFilterId) {

return sessionFactory.withTransaction((s, tr) -> findById(s, id)
.onItem()
.ifNotNull()
.transformToUni(analyzer -> _tokenFilterService.findById(s, tokenFilterId)
.onItem()
.ifNotNull()
.transformToUni(tokenFilter ->
s.fetch(analyzer.getTokenFilters())
.onItem()
.ifNotNull()
.transformToUni(tokenFilters -> {

if (tokenFilters.add(tokenFilter)) {
public Uni<Void> load(Analyzer analyzer) {
return sessionFactory.withTransaction(s -> {

analyzer.setTokenFilters(tokenFilters);
List<Uni<?>> unis = new ArrayList<>();

return persist(s, analyzer)
.map(newSC -> Tuple2.of(newSC, tokenFilter));
}
unis.add(s.fetch(analyzer.getTokenizer()));
unis.add(s.fetch(analyzer.getCharFilters()));
unis.add(s.fetch(analyzer.getTokenFilters()));

return Uni.createFrom().nullItem();
return Uni.combine()
.all()
.unis(unis)
.usingConcurrencyOf(1)
.collectFailures()
.discardItems();

})
)
));
});
}

public Uni<Tuple2<Analyzer, TokenFilter>> removeTokenFilterToAnalyzer(
long id, long tokenFilterId) {

public Uni<Tuple2<Analyzer, CharFilter>> removeCharFilterFromAnalyzer(
long id, long charFilterId) {
return sessionFactory.withTransaction((s, tr) -> findById(s, id)
.onItem()
.ifNotNull()
.transformToUni(analyzer -> s.fetch(analyzer.getTokenFilters())
.transformToUni(analyzer -> s.fetch(analyzer.getCharFilters())
.onItem()
.ifNotNull()
.transformToUni(tokenFilters -> {
.transformToUni(charFilters -> {

if (analyzer.removeTokenFilter(tokenFilters, tokenFilterId)) {
if (analyzer.removeCharFilter(charFilters, charFilterId)) {

return persist(s, analyzer)
.map(newSC -> Tuple2.of(newSC, null));
Expand All @@ -190,18 +254,17 @@ public Uni<Tuple2<Analyzer, TokenFilter>> removeTokenFilterToAnalyzer(
})));
}

public Uni<Analyzer> removeTokenFilterListFromAnalyzer(
long analyzerId) {
public Uni<Analyzer> removeCharFilterListFromAnalyzer(long analyzerId) {
return sessionFactory.withTransaction((s, tr) -> findById(s, analyzerId)
.onItem()
.ifNotNull()
.transformToUni(analyzer -> s.fetch(analyzer.getTokenFilters())
.transformToUni(analyzer -> s.fetch(analyzer.getCharFilters())
.onItem()
.ifNotNull()
.transformToUni(tokenFilters -> {
.transformToUni(charFilters -> {

if(!tokenFilters.isEmpty()){
tokenFilters.clear();
if(!charFilters.isEmpty()){
charFilters.clear();
return persist(s, analyzer);
};

Expand All @@ -210,17 +273,18 @@ public Uni<Analyzer> removeTokenFilterListFromAnalyzer(
})));
}

public Uni<Analyzer> removeCharFilterListFromAnalyzer(long analyzerId) {
public Uni<Analyzer> removeTokenFilterListFromAnalyzer(
long analyzerId) {
return sessionFactory.withTransaction((s, tr) -> findById(s, analyzerId)
.onItem()
.ifNotNull()
.transformToUni(analyzer -> s.fetch(analyzer.getCharFilters())
.transformToUni(analyzer -> s.fetch(analyzer.getTokenFilters())
.onItem()
.ifNotNull()
.transformToUni(charFilters -> {
.transformToUni(tokenFilters -> {

if(!charFilters.isEmpty()){
charFilters.clear();
if(!tokenFilters.isEmpty()){
tokenFilters.clear();
return persist(s, analyzer);
};

Expand All @@ -229,46 +293,18 @@ public Uni<Analyzer> removeCharFilterListFromAnalyzer(long analyzerId) {
})));
}

/**
 * Attaches the {@link CharFilter} identified by {@code charFilterId} to the
 * analyzer identified by {@code id} inside one reactive transaction.
 *
 * <p>Emits a {@code null} item when either entity is missing or the filter is
 * already part of the analyzer's char-filter set.
 *
 * @param id           primary key of the analyzer to update
 * @param charFilterId primary key of the char filter to attach
 * @return a {@link Uni} emitting the persisted analyzer paired with the char
 *         filter, or {@code null} when nothing was changed
 */
public Uni<Tuple2<Analyzer, CharFilter>> addCharFilterToAnalyzer(long id, long charFilterId) {

	return sessionFactory.withTransaction((session, tx) -> findById(session, id)
		.onItem()
		.ifNotNull()
		.transformToUni(analyzer -> _charFilterService.findById(session, charFilterId)
			.onItem()
			.ifNotNull()
			.transformToUni(charFilter -> session.fetch(analyzer.getCharFilters())
				.onItem()
				.ifNotNull()
				.transformToUni(currentFilters -> {
					boolean added = currentFilters.add(charFilter);
					if (added) {
						analyzer.setCharFilters(currentFilters);
						return persist(session, analyzer)
							.map(saved -> Tuple2.of(saved, charFilter));
					}
					// Already associated: no write, signal with a null item.
					return Uni.createFrom().nullItem();
				}))));
}
public Uni<Tuple2<Analyzer, TokenFilter>> removeTokenFilterToAnalyzer(
long id, long tokenFilterId) {

public Uni<Tuple2<Analyzer, CharFilter>> removeCharFilterFromAnalyzer(
long id, long charFilterId) {
return sessionFactory.withTransaction((s, tr) -> findById(s, id)
.onItem()
.ifNotNull()
.transformToUni(analyzer -> s.fetch(analyzer.getCharFilters())
.transformToUni(analyzer -> s.fetch(analyzer.getTokenFilters())
.onItem()
.ifNotNull()
.transformToUni(charFilters -> {
.transformToUni(tokenFilters -> {

if (analyzer.removeCharFilter(charFilters, charFilterId)) {
if (analyzer.removeTokenFilter(tokenFilters, tokenFilterId)) {

return persist(s, analyzer)
.map(newSC -> Tuple2.of(newSC, null));
Expand All @@ -279,19 +315,6 @@ public Uni<Tuple2<Analyzer, CharFilter>> removeCharFilterFromAnalyzer(
})));
}

/**
 * Binds the tokenizer identified by {@code tokenizerId} to the analyzer
 * identified by {@code analyzerId}, persisting the change in one transaction.
 *
 * @param analyzerId  primary key of the analyzer to update
 * @param tokenizerId primary key of the tokenizer to bind
 * @return a {@link Uni} emitting the persisted analyzer paired with the
 *         tokenizer, or {@code null} when either entity is not found
 */
public Uni<Tuple2<Analyzer, Tokenizer>> bindTokenizer(long analyzerId, long tokenizerId) {
	return sessionFactory.withTransaction((session, tx) -> findById(session, analyzerId)
		.onItem()
		.ifNotNull()
		.transformToUni(analyzer -> _tokenizerService.findById(session, tokenizerId)
			.onItem()
			.ifNotNull()
			.transformToUni(tokenizer -> {
				analyzer.setTokenizer(tokenizer);
				return persist(session, analyzer)
					.map(saved -> Tuple2.of(saved, tokenizer));
			})));
}

public Uni<Tuple2<Analyzer, Tokenizer>> unbindTokenizer(long analyzerId) {
return sessionFactory.withTransaction((s, tr) -> findById(s, analyzerId)
.onItem()
Expand All @@ -302,32 +325,4 @@ public Uni<Tuple2<Analyzer, Tokenizer>> unbindTokenizer(long analyzerId) {
}));
}

/**
 * Eagerly initializes the analyzer's lazy associations (tokenizer, char filters,
 * token filters) within a single transaction.
 *
 * <p>The fetches run sequentially ({@code usingConcurrencyOf(1)}); failures are
 * collected so every fetch is attempted before the combined Uni fails.
 *
 * @param analyzer managed analyzer whose associations should be loaded
 * @return a {@link Uni} completing when all three associations are initialized
 */
public Uni<Void> load(Analyzer analyzer) {
	return sessionFactory.withTransaction(session -> {

		List<Uni<?>> fetches = List.of(
			session.fetch(analyzer.getTokenizer()),
			session.fetch(analyzer.getCharFilters()),
			session.fetch(analyzer.getTokenFilters()));

		return Uni.combine()
			.all()
			.unis(fetches)
			.usingConcurrencyOf(1)
			.collectFailures()
			.discardItems();
	});
}


@Inject
TokenFilterService _tokenFilterService;
@Inject
TokenizerService _tokenizerService;
@Inject
CharFilterService _charFilterService;


}

0 comments on commit 04ae26d

Please sign in to comment.