Code update after recent main branch changes
scezen committed Oct 17, 2024
1 parent 73b34cb commit 41325c2
Showing 4 changed files with 7 additions and 111 deletions.
@@ -12,13 +12,7 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.open4goods.config.yml.ui.VerticalConfig;
import org.open4goods.exceptions.ResourceNotFoundException;
import org.open4goods.model.BarcodeType;
import org.open4goods.model.constants.CacheConstants;
import org.open4goods.model.product.Product;
import org.open4goods.store.repository.ProductIndexationWorker;
import org.open4goods.store.repository.redis.RedisProductRepository;
import org.open4goods.commons.config.yml.IndexationConfig;
import org.open4goods.commons.config.yml.ui.VerticalConfig;
import org.open4goods.commons.exceptions.ResourceNotFoundException;
@@ -179,7 +173,7 @@ public Stream<Product> exportAll(BarcodeType... barcodeTypes) {
Criteria criteria = new Criteria("gtinInfos.upcType").in((Object[]) barcodeTypes);
CriteriaQuery query = new CriteriaQuery(criteria);

return elasticsearchTemplate.searchForStream(query, Product.class, current_index).stream()
return elasticsearchOperations.searchForStream(query, Product.class, current_index).stream()
.map(SearchHit::getContent);
}

@@ -597,20 +591,12 @@ public Long countMainIndexHavingRecentUpdate() {
return elasticsearchOperations.count(query, current_index);
}



@Cacheable(cacheNames = CacheConstants.ONE_DAY_LOCAL_CACHE_NAME)
public long countItemsByBarcodeType(BarcodeType... barcodeTypes) {
Criteria criteria = new Criteria("gtinInfos.upcType").in((Object[]) barcodeTypes);
CriteriaQuery query = new CriteriaQuery(criteria);
return elasticsearchTemplate.count(query, current_index);
return elasticsearchOperations.count(query, current_index);
}
@Cacheable(cacheNames = CacheConstants.ONE_HOUR_LOCAL_CACHE_NAME)
public Long countMainIndexHavingPrice() {
CriteriaQuery query = new CriteriaQuery(getValidDateQuery());
return elasticsearchTemplate.count(query, current_index);
}


@Cacheable(keyGenerator = CacheConstants.KEY_GENERATOR, cacheNames = CacheConstants.ONE_HOUR_LOCAL_CACHE_NAME)
public Map<Integer, Long> byTaxonomy() {
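For context, both hunks above swap the legacy elasticsearchTemplate field for ElasticsearchOperations. Below is a minimal sketch of the two Spring Data Elasticsearch calls involved — count over a CriteriaQuery, and searchForStream mapped back to entities — reusing names from the diff; the surrounding class and the index name passed to IndexCoordinates.of are assumptions, not project code.

import java.util.stream.Stream;

import org.open4goods.commons.model.product.Product;
import org.springframework.data.elasticsearch.core.ElasticsearchOperations;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.Criteria;
import org.springframework.data.elasticsearch.core.query.CriteriaQuery;

// Hypothetical fragment illustrating the ElasticsearchOperations calls this commit switches to.
class ProductQueriesSketch {

    private final ElasticsearchOperations elasticsearchOperations;
    private final IndexCoordinates current_index = IndexCoordinates.of("products"); // index name assumed

    ProductQueriesSketch(ElasticsearchOperations elasticsearchOperations) {
        this.elasticsearchOperations = elasticsearchOperations;
    }

    // Same shape as countItemsByBarcodeType: count documents matching a criteria
    long countByUpcType(Object... upcTypes) {
        CriteriaQuery query = new CriteriaQuery(new Criteria("gtinInfos.upcType").in(upcTypes));
        return elasticsearchOperations.count(query, current_index);
    }

    // Same shape as exportAll: stream matching documents; stream() comes from the
    // CloseableIterator returned by searchForStream, so callers should close it when done
    Stream<Product> streamByUpcType(Object... upcTypes) {
        CriteriaQuery query = new CriteriaQuery(new Criteria("gtinInfos.upcType").in(upcTypes));
        return elasticsearchOperations.searchForStream(query, Product.class, current_index)
                .stream()
                .map(SearchHit::getContent);
    }
}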
4 changes: 1 addition & 3 deletions ui/src/main/java/org/open4goods/ui/config/AppConfig.java
@@ -195,9 +195,7 @@ IcecatService icecatFeatureService(UiConfig properties, RemoteFileCachingService


@Bean
OpenDataService openDataService(@Autowired ProductRepository aggregatedDataRepository,
@Autowired UiConfig props,
@Autowired OpenDataConfig openDataConfig) {
OpenDataService openDataService(@Autowired ProductRepository aggregatedDataRepository, @Autowired UiConfig props, @Autowired OpenDataConfig openDataConfig) {
return new OpenDataService(aggregatedDataRepository, props, openDataConfig);
}
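As a side note on this bean (not something the commit changes): @Bean method parameters are resolved from the context automatically, so the parameter-level @Autowired annotations are optional. A sketch of the equivalent declaration, with an invented class name and the imports shown elsewhere in this diff:

import org.open4goods.commons.dao.ProductRepository;
import org.open4goods.ui.config.OpenDataConfig;
import org.open4goods.ui.config.yml.UiConfig;
import org.open4goods.ui.services.OpenDataService;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical configuration class: the same wiring as above, without @Autowired on the parameters.
@Configuration
class OpenDataBeansSketch {

    @Bean
    OpenDataService openDataService(ProductRepository aggregatedDataRepository, UiConfig props, OpenDataConfig openDataConfig) {
        return new OpenDataService(aggregatedDataRepository, props, openDataConfig);
    }
}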

@@ -8,7 +8,6 @@
import java.util.List;

import org.apache.commons.io.IOUtils;
import org.open4goods.exceptions.TechnicalException;
import org.open4goods.ui.config.yml.UiConfig;
import org.open4goods.ui.controllers.ui.UiService;
import org.open4goods.ui.services.OpenDataService;
@@ -19,8 +18,6 @@
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.servlet.ModelAndView;

import com.mashape.unirest.http.exceptions.UnirestException;

import cz.jiripinkas.jsitemapgenerator.ChangeFreq;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
93 changes: 4 additions & 89 deletions ui/src/main/java/org/open4goods/ui/services/OpenDataService.java
@@ -14,25 +14,18 @@

import io.micrometer.core.annotation.Timed;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.open4goods.commons.dao.ProductRepository;
import org.open4goods.commons.exceptions.TechnicalException;
import org.open4goods.commons.helper.ThrottlingInputStream;
import org.open4goods.commons.model.constants.CacheConstants;
import org.open4goods.commons.model.product.AggregatedAttribute;
import org.open4goods.commons.model.product.Product;
import org.open4goods.dao.ProductRepository;
import org.open4goods.exceptions.TechnicalException;
import org.open4goods.helper.ThrottlingInputStream;
import org.open4goods.model.BarcodeType;
import org.open4goods.model.constants.CacheConstants;
import org.open4goods.model.product.AggregatedAttribute;
import org.open4goods.model.product.Product;
import org.open4goods.ui.config.OpenDataConfig;
import org.open4goods.ui.config.yml.UiConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.cache.annotation.Cacheable;
@@ -78,7 +71,7 @@ public class OpenDataService implements HealthIndicator {
public OpenDataService(ProductRepository aggregatedDataRepository, UiConfig uiConfig, OpenDataConfig openDataConfig){
this.aggregatedDataRepository = aggregatedDataRepository;
this.uiConfig = uiConfig;
this.openDataConfig = openDataConfig;
this.openDataConfig = openDataConfig;
}

/**
@@ -131,7 +124,7 @@ public void close() throws IOException {
* This method is scheduled to run periodically.
* TODO : Schedule in conf
*/
@Scheduled(initialDelay = 1000L * 3600, fixedDelay = 1000L * 3600 * 24 * 7)
//@Scheduled(initialDelay = 1000L * 3600, fixedDelay = 1000L * 3600 * 24 * 7)
@Timed(value = "OpenDataService.generateOpendata.time", description = "Time taken to generate the OpenData ZIP files", extraTags = {"service", "OpenDataService"})
public void generateOpendata() {
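Since the @Scheduled annotation is now commented out and the TODO above asks for the schedule to live in configuration, one possible follow-up — a sketch, not part of this commit — is the property-placeholder form of @Scheduled. The opendata.* property keys and their defaults below are invented for illustration.

import org.open4goods.ui.services.OpenDataService;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

// Sketch only: delegates to the real OpenDataService and requires @EnableScheduling
// somewhere in the application configuration.
@Component
class OpenDataScheduleSketch {

    private final OpenDataService openDataService;

    OpenDataScheduleSketch(OpenDataService openDataService) {
        this.openDataService = openDataService;
    }

    // 1 h initial delay and 7 d fixed delay as defaults, overridable from properties;
    // the opendata.* keys are assumptions, not existing configuration.
    @Scheduled(initialDelayString = "${opendata.initial-delay-ms:3600000}",
            fixedDelayString = "${opendata.fixed-delay-ms:604800000}")
    public void generateOpendata() {
        openDataService.generateOpendata();
    }
}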

@@ -140,15 +133,7 @@ public void generateOpendata() {
return;
}

exportRunning.set(true);
return;
}

ZipOutputStream zos = null;
FileOutputStream fos = null;

try {
uiConfig.tmpOpenDataFile().getParentFile().mkdirs();
prepareDirectories();
processDataFiles();
moveTmpFilesToFinalDestination();
@@ -168,27 +153,6 @@ private void prepareDirectories() throws IOException {
uiConfig.tmpGtinZipFile().getParentFile().mkdirs();
}

private void processDataFiles() throws IOException {
LOGGER.info("Starting process for ISBN_13");
processAndCreateZip(ISBN_DATASET_FILENAME, BarcodeType.ISBN_13, uiConfig.tmpIsbnZipFile());

LOGGER.info("Starting process for GTIN/EAN");
processAndCreateZip(GTIN_DATASET_FILENAME, BarcodeType.ISBN_13, uiConfig.tmpGtinZipFile(), true);
}

private void moveTmpFilesToFinalDestination() throws IOException {
moveFile(uiConfig.tmpIsbnZipFile(), uiConfig.isbnZipFile());
moveFile(uiConfig.tmpGtinZipFile(), uiConfig.gtinZipFile());
}

private void moveFile(File src, File dest) throws IOException {
if (dest.exists()) {
FileUtils.deleteQuietly(dest);
}
FileUtils.moveFile(src, dest);
}


/**
* Processes and creates the ZIP files for the opendata.
*/
@@ -218,8 +182,6 @@ private void moveFile(File src, File dest) throws IOException {
FileUtils.moveFile(src, dest);
}

fos = new FileOutputStream(uiConfig.tmpOpenDataFile());
zos = new ZipOutputStream(fos);
private void processAndCreateZip(String filename, BarcodeType barcodeType, File zipFile) throws IOException {
processAndCreateZip(filename, barcodeType, zipFile, false);
}
@@ -251,17 +213,8 @@ private void processAndCreateZip(String filename, BarcodeType barcodeType, File
types = new BarcodeType[]{BarcodeType.ISBN_13};
}

// Process ISBN_13
LOGGER.info("Starting process for ISBN_13");
processAndAddToZip(zos, "open4goods-isbn-dataset.csv", BarcodeType.ISBN_13);

// Process GTIN/EAN (excluding ISBN_13)
LOGGER.info("Starting process for GTIN/EAN excluding ISBN_13");
processAndAddToZip(zos, "open4goods-gtin-dataset.csv", BarcodeType.ISBN_13, true);
AtomicLong count = new AtomicLong();

zos.close();
fos.close();
aggregatedDataRepository.exportAll(types)
.forEach(e -> {
count.incrementAndGet();
Expand All @@ -271,46 +224,8 @@ private void processAndCreateZip(String filename, BarcodeType barcodeType, File
writer.flush();
zos.closeEntry();

// Moving the tmp file
if (uiConfig.openDataFile().exists()) {
FileUtils.deleteQuietly(uiConfig.openDataFile());
}
FileUtils.moveFile(uiConfig.tmpOpenDataFile(), uiConfig.openDataFile());

LOGGER.info("Opendata CSV files generated and zipped successfully.");
LOGGER.info("{} rows exported in {}.", count.get(), filename);

} catch (Exception e) {
LOGGER.error("Error while generating opendata set", e);
} finally {
IOUtils.closeQuietly(zos);
IOUtils.closeQuietly(fos);
exportRunning.set(false);
LOGGER.error("Error during processing of {}: {}", filename, e.getMessage());
}
}

private void processAndAddToZip(ZipOutputStream zos, String filename, BarcodeType barcodeType) throws IOException {
processAndAddToZip(zos, filename, barcodeType, false);
}

private void processAndAddToZip(ZipOutputStream zos, String filename, BarcodeType barcodeType, boolean invertCondition) throws IOException {
ZipEntry entry = new ZipEntry(filename);
zos.putNextEntry(entry);
CSVWriter writer = new CSVWriter(new OutputStreamWriter(zos));
writer.writeNext(header);

AtomicLong count = new AtomicLong();
try {
aggregatedDataRepository.exportAll().filter(e ->
invertCondition ? !e.getGtinInfos().getUpcType().equals(barcodeType) : e.getGtinInfos().getUpcType().equals(barcodeType)
).forEach(e -> {
count.incrementAndGet();
writer.writeNext(toEntry(e));
});
writer.flush();
zos.closeEntry();
LOGGER.info("{} rows exported in {}.", count.get(), filename);
} catch (Exception e) {
LOGGER.error("Error during processing of {}: {}", filename, e.getMessage());
}
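Both the removed processAndAddToZip above and the new processAndCreateZip rely on the same pattern: open a ZipEntry, wrap the ZipOutputStream in a CSVWriter, emit rows, flush, and close the entry rather than the stream. A self-contained sketch with placeholder data — the file names and rows are invented, and com.opencsv is assumed for CSVWriter since the import is not shown in this diff:

import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import com.opencsv.CSVWriter;

// Minimal, runnable illustration of streaming CSV rows straight into a ZIP entry.
class CsvZipSketch {

    public static void main(String[] args) throws Exception {
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("sample-dataset.zip"))) {
            zos.putNextEntry(new ZipEntry("sample-dataset.csv"));
            CSVWriter writer = new CSVWriter(new OutputStreamWriter(zos, StandardCharsets.UTF_8));
            writer.writeNext(new String[] { "gtin", "name" });                 // header row
            writer.writeNext(new String[] { "0000000000000", "placeholder" }); // exported rows would be streamed here
            writer.flush();   // push buffered CSV bytes into the zip stream
            zos.closeEntry(); // finish this entry; the stream stays usable for further entries
        }
    }
}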
@@ -477,4 +392,4 @@ public static String humanReadableByteCountBin(long bytes) {
}
return String.format("%.1f %cB", bytes / 1000.0, ci.current());
}
}
}
