
Commit 0964a22

Merge pull request #107 from xtreme1-io/dev
Release v0.6.0
2 parents: 8e81625 + 834cdd2

336 files changed: +22568, -2042 lines


.ops/.gitlab-ci.yml

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ stages:
 
 variables:
   APP_NAME: "xtreme1"
-  APP_VERSION: "0.5.6"
+  APP_VERSION: "0.6.0"
 
   FRONTEND_PACKAGE_DIR: "dist"
   BACKEND_PACKAGE_NAME: "${APP_NAME}-backend-${APP_VERSION}-SNAPSHOT.jar"

.ops/backend.dockerfile

Lines changed: 4 additions & 2 deletions
@@ -1,11 +1,13 @@
 FROM openjdk:11
 
 RUN apt update && \
-    apt install -y iputils-ping curl wget netcat
-
+    apt install -y iputils-ping curl wget netcat python3 python3-pip
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@97f0e90
 WORKDIR /app
 COPY target/$BACKEND_PACKAGE_NAME ./app.jar
 RUN mkdir -p config
+RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-lidar-fusion-trial.zip' -O xtreme1-lidar-fusion-trial.zip
+RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-image-trial.zip' -O xtreme1-image-trial.zip
 
 EXPOSE 8080

README.md

Lines changed: 3 additions & 3 deletions
@@ -1,7 +1,7 @@
 <div align="center">
 <img width="386" alt="Xtreme1 logo" src="https://user-images.githubusercontent.com/84139543/190300943-98da7d5c-bd67-4074-a94f-b7405d29fb90.png">
 
-![](https://img.shields.io/badge/Release-v0.5.6-green)
+![](https://img.shields.io/badge/Release-v0.6.0-green)
 ![](https://img.shields.io/badge/License-Apache%202.0-blueviolet)
 [![Slack](https://img.shields.io/badge/Join-Slack-orange.svg?logo=slack)](https://join.slack.com/t/xtreme1group/shared_invite/zt-1jhk36uzr-NpdpYXeQAEHN6rYJy5_6pg)
 [![Twitter](https://img.shields.io/badge/Follow-Twitter-blue)](https://twitter.com/Xtreme1io)

@@ -58,8 +58,8 @@ Image Data Curation (Visualizing & Debug) - [MobileNetV3](https://github.com/xi
 Download the latest release package and unzip it.
 
 ```bash
-wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.5.6/xtreme1-v0.5.6.zip
-unzip -d xtreme1-v0.5.6 xtreme1-v0.5.6.zip
+wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.6.0/xtreme1-v0.6.0.zip
+unzip -d xtreme1-v0.6.0 xtreme1-v0.6.0.zip
 ```
 
 ## Start all services

backend/Dockerfile

Lines changed: 4 additions & 2 deletions
@@ -6,10 +6,12 @@ RUN --mount=type=cache,target=/root/.m2 mvn package
 
 FROM openjdk:11-jre
 RUN apt update && \
-    apt install -y iputils-ping curl wget netcat
+    apt install -y iputils-ping curl wget netcat python3 python3-pip git
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@97f0e90
 WORKDIR /app
-COPY --from=build /build/target/xtreme1-backend-0.5.6-SNAPSHOT.jar ./app.jar
+COPY --from=build /build/target/xtreme1-backend-0.6.0-SNAPSHOT.jar ./app.jar
 RUN mkdir -p config
 RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-lidar-fusion-trial.zip' -O xtreme1-lidar-fusion-trial.zip
+RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-image-trial.zip' -O xtreme1-image-trial.zip
 EXPOSE 8080
 CMD java -jar app.jar

backend/README.md

Lines changed: 1 addition & 1 deletion
@@ -92,7 +92,7 @@ cd backend
 mvn package
 
 # Using local configuration to start application.
-java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.5.6-SNAPSHOT.jar
+java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.6.0-SNAPSHOT.jar
 ```
 
 Now you can access the backend service at `http://localhost:8080/`.

backend/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@
 
     <groupId>ai.basic</groupId>
     <artifactId>xtreme1-backend</artifactId>
-    <version>0.5.6-SNAPSHOT</version>
+    <version>0.6.0-SNAPSHOT</version>
     <name>Xtreme1 Backend</name>
     <description></description>
 
backend/src/main/java/ai/basic/x1/adapter/api/config/CommonConfig.java

Lines changed: 7 additions & 0 deletions
@@ -186,4 +186,11 @@ public DatasetSimilarityJobUseCase datasetSimilarityJobUseCase() {
     public ModelRecognitionUseCase modelRecognitionUseCase() {
         return new ModelRecognitionUseCase();
     }
+
+    @Bean
+    public ModelRunRecordUseCase modelRunRecordUseCase() {
+        return new ModelRunRecordUseCase();
+    }
+
+
 }

backend/src/main/java/ai/basic/x1/adapter/api/config/DatasetInitialInfo.java

Lines changed: 2 additions & 3 deletions
@@ -5,6 +5,7 @@
 import lombok.Builder;
 import lombok.Data;
 import lombok.NoArgsConstructor;
+import lombok.experimental.SuperBuilder;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.stereotype.Component;
 
@@ -14,11 +15,9 @@
  * @author fyb
  */
 @Data
-@Builder
+@SuperBuilder
 @NoArgsConstructor
 @AllArgsConstructor
-@Component
-@ConfigurationProperties(prefix = "dataset-initial.dataset")
 public class DatasetInitialInfo {
 
     /**
backend/src/main/java/ai/basic/x1/adapter/api/config/ImageDatasetInitialInfo.java

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+package ai.basic.x1.adapter.api.config;
+
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.experimental.SuperBuilder;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author fyb
+ */
+@Data
+@SuperBuilder
+@NoArgsConstructor
+@Component
+@ConfigurationProperties(prefix = "dataset-initial.dataset.image")
+public class ImageDatasetInitialInfo extends DatasetInitialInfo{
+
+}

backend/src/main/java/ai/basic/x1/adapter/api/config/JobConfig.java

Lines changed: 65 additions & 16 deletions
@@ -1,9 +1,6 @@
 package ai.basic.x1.adapter.api.config;
 
-import ai.basic.x1.adapter.api.job.ModelJobConsumerListener;
-import ai.basic.x1.adapter.api.job.ModelRunErrorHandler;
-import ai.basic.x1.adapter.api.job.PreLabelModelMessageHandler;
-import ai.basic.x1.adapter.api.job.PredictImageCo80ModelHandler;
+import ai.basic.x1.adapter.api.job.*;
 import org.springframework.context.ApplicationContext;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;

@@ -34,12 +31,25 @@
 public class JobConfig {
     private static final int PROCESSORS = Runtime.getRuntime().availableProcessors();
     @Bean
-    public Executor redisStreamExecutor() {
+    public Executor dataRedisStreamExecutor() {
         AtomicInteger index = new AtomicInteger(1);
         ThreadPoolExecutor executor = new ThreadPoolExecutor(PROCESSORS, PROCESSORS, 0, TimeUnit.SECONDS,
                 new LinkedBlockingDeque<>(), r -> {
             Thread thread = new Thread(r);
-            thread.setName("redisConsumer-executor" + index.getAndIncrement());
+            thread.setName("dataRedisConsumer-executor" + index.getAndIncrement());
+            thread.setDaemon(true);
+            return thread;
+        });
+        return executor;
+    }
+
+    @Bean
+    public Executor datasetRedisStreamExecutor() {
+        AtomicInteger index = new AtomicInteger(1);
+        ThreadPoolExecutor executor = new ThreadPoolExecutor(PROCESSORS, PROCESSORS, 0, TimeUnit.SECONDS,
+                new LinkedBlockingDeque<>(), r -> {
+            Thread thread = new Thread(r);
+            thread.setName("datasetRedisConsumer-executor" + index.getAndIncrement());
             thread.setDaemon(true);
             return thread;
         });

@@ -60,22 +70,22 @@ public Executor similarityExecutor() {
     }
 
     @Bean(initMethod = "start", destroyMethod = "stop")
-    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer(Executor redisStreamExecutor,
+    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> dataStreamMessageListenerContainer(Executor dataRedisStreamExecutor,
                                                                                                                 RedisConnectionFactory redisConnectionFactory,
                                                                                                                 RedisTemplate redisTemplate,
                                                                                                                 ApplicationContext applicationContext
     ) {
 
         try {
-            redisTemplate.opsForStream().createGroup(MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP);
+            redisTemplate.opsForStream().createGroup(DATA_MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP);
         } catch (RedisSystemException redisSystemException) {
             //no do
         }
         StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, String>> options =
                 StreamMessageListenerContainer.StreamMessageListenerContainerOptions
                         .builder()
                         .batchSize(10)
-                        .executor(redisStreamExecutor)
+                        .executor(dataRedisStreamExecutor)
                         .keySerializer(RedisSerializer.string())
                         .hashKeySerializer(RedisSerializer.string())
                         .hashValueSerializer(RedisSerializer.string())

@@ -87,24 +97,63 @@ public StreamMessageListenerContainer<String, ObjectRecord<String, String>> stre
                         .build();
         StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer =
                 StreamMessageListenerContainer.create(redisConnectionFactory, options);
-        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> streamReadRequest = StreamMessageListenerContainer
+        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> dataStreamReadRequest = StreamMessageListenerContainer
                 .StreamReadRequest
-                .builder(StreamOffset.create(MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
+                .builder(StreamOffset.create(DATA_MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
                 .consumer(Consumer.from(MODEL_RUN_CONSUMER_GROUP, MODEL_RUN_CONSUMER_NAME))
                 .autoAcknowledge(false)
                 .cancelOnError(throwable -> false)
                 .build();
-        streamMessageListenerContainer.register(streamReadRequest, new ModelJobConsumerListener(MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
+        streamMessageListenerContainer.register(dataStreamReadRequest, new DataModelJobConsumerListener(DATA_MODEL_RUN_STREAM_KEY, MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
+        return streamMessageListenerContainer;
+    }
+
+    @Bean(initMethod = "start", destroyMethod = "stop")
+    public StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainerDataset(Executor datasetRedisStreamExecutor,
+                                                                                                                       RedisConnectionFactory redisConnectionFactory,
+                                                                                                                       RedisTemplate redisTemplate,
+                                                                                                                       ApplicationContext applicationContext
+    ) {
+        try {
+            redisTemplate.opsForStream().createGroup(DATASET_MODEL_RUN_STREAM_KEY, DATASET_MODEL_RUN_CONSUMER_GROUP);
+        } catch (RedisSystemException redisSystemException) {
+            //no do
+        }
+        StreamMessageListenerContainer.StreamMessageListenerContainerOptions<String, ObjectRecord<String, String>> options =
+                StreamMessageListenerContainer.StreamMessageListenerContainerOptions
+                        .builder()
+                        .batchSize(10)
+                        .executor(datasetRedisStreamExecutor)
+                        .keySerializer(RedisSerializer.string())
+                        .hashKeySerializer(RedisSerializer.string())
+                        .hashValueSerializer(RedisSerializer.string())
+                        // less than `spring.redis.timeout`
+                        .pollTimeout(Duration.ofSeconds(1))
+                        .objectMapper(new ObjectHashMapper())
+                        .errorHandler(new ModelRunErrorHandler())
+                        .targetType(String.class)
+                        .build();
+        StreamMessageListenerContainer<String, ObjectRecord<String, String>> streamMessageListenerContainer =
+                StreamMessageListenerContainer.create(redisConnectionFactory, options);
+
+        StreamMessageListenerContainer.ConsumerStreamReadRequest<String> datasetStreamReadRequest = StreamMessageListenerContainer
+                .StreamReadRequest
+                .builder(StreamOffset.create(DATASET_MODEL_RUN_STREAM_KEY, ReadOffset.lastConsumed()))
+                .consumer(Consumer.from(DATASET_MODEL_RUN_CONSUMER_GROUP, DATASET_MODEL_RUN_CONSUMER_NAME))
+                .autoAcknowledge(false)
+                .cancelOnError(throwable -> false)
+                .build();
+        streamMessageListenerContainer.register(datasetStreamReadRequest, new DatasetModelJobConsumerListener(DATASET_MODEL_RUN_STREAM_KEY, DATASET_MODEL_RUN_CONSUMER_GROUP, redisTemplate, applicationContext));
         return streamMessageListenerContainer;
     }
 
     @Bean
-    public PreLabelModelMessageHandler preLabelModelMessageHandler() {
-        return new PreLabelModelMessageHandler();
+    public PointCloudDetectionModelMessageHandler pointCloudDetectionModelMessageHandler() {
+        return new PointCloudDetectionModelMessageHandler();
     }
 
     @Bean
-    public PredictImageCo80ModelHandler predictImageCo80ModelHandler() {
-        return new PredictImageCo80ModelHandler();
+    public ImageDetectionModelHandler imageDetectionModelHandler() {
+        return new ImageDetectionModelHandler();
     }
 }
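
The listener setup above now registers two independent Redis Stream consumers: the existing per-data model-run stream (keyed by DATA_MODEL_RUN_STREAM_KEY) and a new dataset-level stream consumed by DatasetModelJobConsumerListener under its own consumer group. Below is a minimal producer-side sketch, not part of this commit, assuming a JSON string payload and a placeholder stream key (the real key is the DATASET_MODEL_RUN_STREAM_KEY constant, whose value is not shown in this diff):

```java
import org.springframework.data.redis.connection.stream.ObjectRecord;
import org.springframework.data.redis.core.RedisTemplate;

// Sketch only: shows how a producer could enqueue a dataset-level model run for the
// container configured in JobConfig. "dataset_model_run" stands in for the project's
// DATASET_MODEL_RUN_STREAM_KEY constant, and the JSON payload shape is assumed.
public class DatasetModelRunProducerSketch {

    private final RedisTemplate<String, String> redisTemplate;

    public DatasetModelRunProducerSketch(RedisTemplate<String, String> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    public void submit(String messageJson) {
        // XADD onto the stream; the listener container polls it through the
        // DATASET_MODEL_RUN_CONSUMER_GROUP consumer group and acknowledges manually
        // (autoAcknowledge(false) in JobConfig).
        ObjectRecord<String, String> record = ObjectRecord.create("dataset_model_run", messageJson);
        redisTemplate.opsForStream().add(record);
    }
}
```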
backend/src/main/java/ai/basic/x1/adapter/api/config/PointCloudDatasetInitialInfo.java

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+package ai.basic.x1.adapter.api.config;
+
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.experimental.SuperBuilder;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+/**
+ * @author fyb
+ */
+@Data
+@SuperBuilder
+@NoArgsConstructor
+@Component
+@ConfigurationProperties(prefix = "dataset-initial.dataset.point-cloud")
+public class PointCloudDatasetInitialInfo extends DatasetInitialInfo{
+
+}
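
DatasetInitialInfo itself no longer carries @Component or @ConfigurationProperties; binding moves to the two typed subclasses (ImageDatasetInitialInfo and PointCloudDatasetInitialInfo), each with its own prefix, while @SuperBuilder replaces @Builder so Lombok can generate builders across the inheritance chain. A minimal sketch, not from this commit, of how the two beans could be consumed side by side (the surrounding class and field usage are assumptions):

```java
import org.springframework.stereotype.Component;

// Sketch only: both subclasses are @ConfigurationProperties beans bound to
// "dataset-initial.dataset.image" and "dataset-initial.dataset.point-cloud",
// so they can be injected wherever the trial datasets are initialized.
@Component
class TrialDatasetBootstrapSketch {

    private final ImageDatasetInitialInfo imageInfo;
    private final PointCloudDatasetInitialInfo pointCloudInfo;

    TrialDatasetBootstrapSketch(ImageDatasetInitialInfo imageInfo,
                                PointCloudDatasetInitialInfo pointCloudInfo) {
        this.imageInfo = imageInfo;
        this.pointCloudInfo = pointCloudInfo;
    }
}
```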

backend/src/main/java/ai/basic/x1/adapter/api/controller/DataInfoController.java

Lines changed: 43 additions & 5 deletions
@@ -2,29 +2,35 @@
 
 import ai.basic.x1.adapter.api.annotation.user.LoggedUser;
 import ai.basic.x1.adapter.dto.*;
+import ai.basic.x1.adapter.dto.request.DataInfoSplitFilterDTO;
+import ai.basic.x1.adapter.dto.request.DataInfoSplitReqDTO;
 import ai.basic.x1.adapter.exception.ApiException;
-import ai.basic.x1.entity.DataInfoQueryBO;
-import ai.basic.x1.entity.DataInfoUploadBO;
-import ai.basic.x1.entity.DataPreAnnotationBO;
-import ai.basic.x1.entity.ScenarioQueryBO;
+import ai.basic.x1.adapter.port.rpc.dto.DatasetModelResultDTO;
+import ai.basic.x1.entity.*;
 import ai.basic.x1.entity.enums.ModelCodeEnum;
 import ai.basic.x1.entity.enums.ScenarioQuerySourceEnum;
+import ai.basic.x1.entity.enums.SplitTargetDataTypeEnum;
+import ai.basic.x1.entity.enums.SplitTypeEnum;
 import ai.basic.x1.usecase.*;
 import ai.basic.x1.usecase.exception.UsecaseCode;
 import ai.basic.x1.util.DefaultConverter;
 import ai.basic.x1.util.ModelParamUtils;
 import ai.basic.x1.util.Page;
 import cn.hutool.core.collection.CollectionUtil;
 import cn.hutool.core.util.EnumUtil;
+import cn.hutool.core.util.StrUtil;
 import cn.hutool.json.JSONObject;
 import cn.hutool.json.JSONUtil;
+import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.validation.annotation.Validated;
 import org.springframework.web.bind.annotation.*;
 
 import javax.validation.constraints.NotEmpty;
 import javax.validation.constraints.NotNull;
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;

@@ -36,6 +42,7 @@
 @RestController
 @RequestMapping("/data/")
 @Validated
+@Slf4j
 public class DataInfoController extends BaseDatasetController {
 
     @Autowired

@@ -147,9 +154,35 @@ public DataAnnotationRecordDTO findDataIdsByRecordId(@PathVariable Long id, @Log
         return DefaultConverter.convert(dataAnnotationRecordBO, DataAnnotationRecordDTO.class);
     }
 
+    @GetMapping("findLockRecordByDatasetId")
+    public List<LockRecordDTO> findLockRecordByDatasetId(@NotNull(message = "datasetId cannot be null") @RequestParam(required = false) Long datasetId) {
+        return DefaultConverter.convert(dataAnnotationRecordUseCase.findLockRecordByDatasetId(datasetId), LockRecordDTO.class);
+    }
+
+    @PostMapping("unLockByLockRecordIds")
+    public void unLockByLockRecordIds(@RequestBody @Validated DataBatchUnlockDTO dataBatchUnlockDTO) {
+        dataAnnotationRecordUseCase.unLockByLockRecordIds(dataBatchUnlockDTO.getLockRecordIds());
+    }
+
+    @PostMapping("split/dataIds")
+    public void splitByDataIds(@RequestBody @Validated DataInfoSplitReqDTO dto) {
+        dataInfoUsecase.splitByDataIds(dto.getDataIds(), EnumUtil.fromString(SplitTypeEnum.class, dto.getSplitType()));
+    }
+
+    @PostMapping("split/filter")
+    public void splitByFilter(@RequestBody @Validated DataInfoSplitFilterDTO dto) {
+        dataInfoUsecase.splitByFilter(DefaultConverter.convert(dto, DataInfoSplitFilterBO.class));
+    }
+
+    @GetMapping("split/totalDataCount")
+    public Long getSplitDataTotalCount(@NotNull(message = "datasetId cannot be null") @RequestParam(required = false) Long datasetId,
+                                       @RequestParam(value = "targetDataType", required = false) SplitTargetDataTypeEnum targetDataType) {
+        return dataInfoUsecase.getSplitDataTotalCount(datasetId, targetDataType);
+    }
+
     @PostMapping("deleteBatch")
     public void deleteBatch(@RequestBody @Validated DataInfoDeleteDTO dto) {
-        dataInfoUsecase.deleteBatch(dto.getDatasetId(),dto.getIds());
+        dataInfoUsecase.deleteBatch(dto.getDatasetId(), dto.getIds());
     }
 
     @GetMapping("generatePresignedUrl")

@@ -251,4 +284,9 @@ public JSONObject getDataAndResult(@NotNull(message = "cannot be null") @Request
         return JSONUtil.parseObj(JSONUtil.toJsonStr(dataInfoUsecase.getDataAndResult(datasetId, dataIds)));
     }
 
+    @GetMapping("getDataModelRunResult/{dataId}")
+    public List<DatasetModelResultDTO> getDataModelRunResult(@PathVariable Long dataId) {
+        return DefaultConverter.convert(dataAnnotationObjectUseCase.getDataModelRunResult(dataId), DatasetModelResultDTO.class);
+    }
+
 }
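
The new split endpoints read only two fields from the request body. Below is an inferred sketch of that shape, not the actual class: the real DataInfoSplitReqDTO lives in ai.basic.x1.adapter.dto.request and may declare additional fields and validation constraints not visible in this diff.

```java
import java.util.List;
import lombok.Data;

// Inferred sketch based only on dto.getDataIds() and dto.getSplitType() as used in
// splitByDataIds(). splitType is parsed with EnumUtil.fromString(SplitTypeEnum.class, ...),
// so it holds the name of a SplitTypeEnum constant (the concrete constants are not shown here).
@Data
public class DataInfoSplitReqSketch {

    /** IDs of the data items to assign to a split. */
    private List<Long> dataIds;

    /** Name of a SplitTypeEnum constant. */
    private String splitType;
}
```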
