Skip to content

Commit

Permalink
0.2.0 (#111)
Browse files Browse the repository at this point in the history
* Simplify build gradle version (#108)

* Simplify build gradle version

* Simplify build gradle version

- readme

* ColumnList Sorting (#110)

- js -> java

* Apply Table Import  (#116)

* Table import

* Query Editor Column List (#112)

* Query Editor Column List

- apply sorting(with partition, clustering key)

* modify js file
  • Loading branch information
akageun authored Nov 13, 2024
1 parent 6f63c06 commit eff9669
Show file tree
Hide file tree
Showing 26 changed files with 614 additions and 256 deletions.
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

Cassdio is centralized web management system for managing [Apache Cassandra](https://cassandra.apache.org/_/index.html)!

Cassdio provide powerful tools to efficiently manage and monitor Apache Cassandra databases. You can monitor the
Cassdio provides powerful tools to efficiently manage and monitor Apache Cassandra databases. You can monitor the
real-time status of your database clusters and perform various tasks intuitively through a user-friendly interface.

This management system helps simplify system operations and enhance stability. It's an essential tool for managing
Expand Down Expand Up @@ -45,9 +45,9 @@ Apache Cassandra databases, offering excellent functionality and user convenienc
### Installation

```
wget https://github.com/hakdang/cassdio/releases/download/v0.1.0/cassdio-0.1.0.jar
https://github.com/hakdang/cassdio/releases/latest/download/cassdio.jar
java -jar ./cassdio-0.1.0.jar
java -jar ./cassdio.jar
```

### Browser
Expand All @@ -72,5 +72,5 @@ java -jar ./cassdio-0.1.0.jar

## License

Cassdio is Open Source software released under
Cassdio is open-source software released under
the [Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0.html).
3 changes: 2 additions & 1 deletion cassdio-core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,12 @@ dependencies {
// Json
implementation("org.springframework.boot:spring-boot-starter-json")
implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.17.1")

implementation("org.apache.commons:commons-csv:1.9.0")
// CommonsLang3
api("org.apache.commons:commons-lang3:3.13.0")
api("com.google.guava:guava:33.0.0-jre")
api("org.apache.commons:commons-collections4:4.4")
api("org.apache.commons:commons-csv:1.9.0")

// Cache
implementation("org.springframework.boot:spring-boot-starter-cache")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
package kr.hakdang.cassdio.common.utils;

/**
 * CsvHelper — placeholder for shared CSV utility methods.
 *
 * <p>Declared {@code final} with a private constructor because it is a static
 * utility holder and must not be instantiated or subclassed (Effective Java, Item 4).
 *
 * @author akageun
 * @since 2024-08-19
 */
public final class CsvHelper {

    /** Utility class: prevent instantiation. */
    private CsvHelper() {
        throw new AssertionError("CsvHelper is a utility class and must not be instantiated");
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
package kr.hakdang.cassdio.core.domain.cluster.keyspace.table;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * ClusterCsvProvider
 *
 * <p>Creates CSV import templates (header-only files) and parses uploaded CSV
 * files into row maps for table imports.
 *
 * <p>NOTE(review): {@code importerCsvSampleDownload} duplicates
 * {@code ClusterTableCsvProvider#importerCsvSampleDownload} — consider keeping
 * only one of the two services.
 *
 * @author akageun
 * @since 2024-08-08
 */
@Slf4j
@Service
public class ClusterCsvProvider {

    /**
     * Writes a CSV template containing only the header row to {@code writer}.
     *
     * @param writer     destination for the template; not closed by this method
     *                   (the CSVPrinter wraps it, and closing the printer closes it
     *                   — TODO confirm the caller expects the writer to be closed)
     * @param headerList column names to emit as the header row, in order
     * @throws RuntimeException wrapping any {@link IOException} from the writer
     */
    public void importerCsvSampleDownload(Writer writer, List<String> headerList) {
        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
            .setHeader(headerList.toArray(String[]::new))
            .build();

        try (final CSVPrinter printer = new CSVPrinter(writer, csvFormat)) {
            log.info("create complete importer csv sample");

            printer.flush();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Parses CSV content into one map per data row, keyed by {@code columnList}.
     *
     * <p>The first line of the input is treated as a header and skipped
     * ({@code setSkipHeaderRecord(true)}); values are trimmed, and blank values
     * are replaced with the empty string.
     *
     * @param reader     CSV source; closed when parsing completes (the parser
     *                   owns it once opened)
     * @param columnList expected column names; every map contains all of them
     * @return one {@code Map<column, value>} per CSV data row, in file order
     * @throws IOException if reading or parsing fails
     */
    public List<Map<String, Object>> importCsvReader(Reader reader, List<String> columnList) throws IOException {
        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
            .setHeader(columnList.toArray(String[]::new))
            .setSkipHeaderRecord(true)
            .setTrim(true)
            .build();

        // TODO: decide on a validation strategy for imported rows.
        List<Map<String, Object>> values = new ArrayList<>();

        // Fix: CSVFormat.parse() returns a Closeable CSVParser; the original
        // iterated it without ever closing it (resource leak).
        try (CSVParser parser = csvFormat.parse(reader)) {
            for (CSVRecord csvRecord : parser) {
                Map<String, Object> row = new HashMap<>();

                for (String column : columnList) {
                    row.put(column, StringUtils.defaultIfBlank(csvRecord.get(column), ""));
                }

                values.add(row);
            }
        }

        return values;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
package kr.hakdang.cassdio.core.domain.cluster.keyspace.table;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.io.Writer;
import java.util.List;

/**
 * ClusterTableCsvProvider
 *
 * <p>Creates a header-only CSV template for the table import feature.
 *
 * <p>NOTE(review): this duplicates {@code ClusterCsvProvider#importerCsvSampleDownload};
 * consider consolidating the two services.
 *
 * @author akageun
 * @since 2024-08-08
 */
@Slf4j
@Service
public class ClusterTableCsvProvider {

    /**
     * Writes a CSV template containing only the header row to {@code writer}.
     *
     * @param writer           destination for the template; closing the printer
     *                         closes (and flushes) it
     * @param sortedColumnList column names to emit as the header row, already
     *                         sorted (partition keys, clustering keys, regular)
     * @throws RuntimeException wrapping any {@link IOException} from the writer
     */
    public void importerCsvSampleDownload(Writer writer, List<String> sortedColumnList) {
        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
            .setHeader(sortedColumnList.toArray(String[]::new))
            .build();

        // Only the header row is printed; removed the commented-out
        // printRecord(author, title) leftover from the commons-csv example.
        try (final CSVPrinter printer = new CSVPrinter(writer, csvFormat)) {
            log.info("create complete importer csv sample");
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

}
Original file line number Diff line number Diff line change
@@ -1,18 +1,27 @@
package kr.hakdang.cassdio.core.domain.cluster.keyspace.table;

import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.BatchStatement;
import com.datastax.oss.driver.api.core.cql.DefaultBatchType;
import com.datastax.oss.driver.api.core.cql.ResultSet;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.querybuilder.QueryBuilder;
import com.datastax.oss.driver.api.querybuilder.insert.InsertInto;
import com.datastax.oss.driver.api.querybuilder.insert.JsonInsert;
import com.datastax.oss.protocol.internal.util.Bytes;
import com.google.common.collect.Lists;
import kr.hakdang.cassdio.common.utils.Jsons;
import kr.hakdang.cassdio.core.domain.cluster.BaseClusterCommander;
import kr.hakdang.cassdio.core.domain.cluster.CqlSessionSelectResults;
import kr.hakdang.cassdio.core.domain.cluster.keyspace.CassdioColumnDefinition;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.util.List;
import java.util.Map;

/**
* ClusterTableRowCommander
Expand Down Expand Up @@ -42,4 +51,26 @@ public CqlSessionSelectResults rowSelect(String clusterId, TableDTO.ClusterTable
resultSet.getExecutionInfo().getPagingState()
);
}

/**
 * Inserts the given rows into {@code args.keyspace.table} as JSON INSERT
 * statements, executed in batches of {@code args.getPerCommitSize()} rows
 * using the batch type from {@code args}.
 *
 * @param args   cluster/keyspace/table target plus batch settings
 * @param values rows to insert, one map per row; a null/empty list is a no-op
 */
public void rowInserts(TableDTO.ClusterTableRowImportArgs args, List<Map<String, Object>> values) {
    if (CollectionUtils.isEmpty(values)) {
        return; // nothing to insert
    }

    CqlSession session = cqlSessionFactory.get(args.getClusterId());

    for (List<Map<String, Object>> chunk : Lists.partition(values, args.getPerCommitSize())) {
        // Build all INSERT ... JSON statements for this chunk, then batch them.
        List<SimpleStatement> statements = chunk.stream()
            .map(row -> QueryBuilder.insertInto(args.getKeyspace(), args.getTable())
                .json(Jsons.toJson(row))
                .build())
            .toList();

        session.execute(BatchStatement.newInstance(args.getBatchType()).addAll(statements));
    }
}
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
package kr.hakdang.cassdio.core.domain.cluster.keyspace.table;

import com.datastax.oss.driver.api.core.cql.BatchType;
import com.datastax.oss.driver.api.core.cql.DefaultBatchType;
import io.micrometer.common.util.StringUtils;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.ToString;
import org.apache.commons.lang3.EnumUtils;

/**
* ClusterTableArgs
Expand Down Expand Up @@ -99,4 +102,31 @@ public ClusterTableRowArgs(String keyspace, String table, int pageSize, int time
}
}

@ToString
@Getter
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public static class ClusterTableRowImportArgs {
    /** Default number of rows per batch commit. */
    private static final int DEFAULT_PER_COMMIT_SIZE = 50;

    private String clusterId;
    private String keyspace;
    private String table;

    private BatchType batchType;
    private int perCommitSize = DEFAULT_PER_COMMIT_SIZE;

    @Builder
    public ClusterTableRowImportArgs(
        String clusterId,
        String keyspace,
        String table,
        String batchTypeCode,
        int perCommitSize
    ) {
        this.clusterId = clusterId;
        this.keyspace = keyspace;
        this.table = table;
        // EnumUtils.getEnum returns null for an unknown/blank code; callers must
        // tolerate a null batchType — TODO confirm the intended fallback.
        this.batchType = EnumUtils.getEnum(DefaultBatchType.class, batchTypeCode);
        // Bug fix: the field default (50) was unconditionally overwritten here, so a
        // builder that omitted perCommitSize produced 0 — which makes the downstream
        // Lists.partition(values, 0) throw. Non-positive values fall back to the default.
        this.perCommitSize = perCommitSize > 0 ? perCommitSize : DEFAULT_PER_COMMIT_SIZE;
    }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,11 @@
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker;
import static java.util.Collections.emptyList;
Expand All @@ -39,28 +43,50 @@ public CqlSessionSelectResults columnList(String clusterId, String keyspace, Str
// Fetches system-table column metadata for keyspace.table, optionally filtered to
// columnList, and returns rows sorted by key kind (partition, clustering, regular)
// then position via the synthetic "sortValue" key injected below.
//
// NOTE(review): this block appears to contain interleaved pre-diff lines from the
// page scrape — "SimpleStatement statement;", "statement = select.build(keyspace, table)"
// (no semicolon), and the extra "convertRows(session, resultSet)," argument in the
// return — so as written it does not compile (duplicate declaration / syntax error).
// Reconcile against the post-commit file before trusting this text.
public CqlSessionSelectResults columnList(String clusterId, String keyspace, String table, List<String> columnList) {
CqlSession session = cqlSessionFactory.get(clusterId);

// NOTE(review): stale pre-diff line — superseded by the declaration further down.
SimpleStatement statement;

// Base query: all columns of the system table for the given keyspace/table,
// with bind markers filled positionally from 'arr' below.
Select select = getColumnTable(session, keyspace)
.all()
.whereColumn(CassandraSystemTablesColumn.TABLES_KEYSPACE_NAME.getColumnName()).isEqualTo(bindMarker())
.whereColumn(CassandraSystemTablesColumn.TABLES_TABLE_NAME.getColumnName()).isEqualTo(bindMarker());

// if (CollectionUtils.isNotEmpty(columnList)) {
// select.whereColumn("column_name").in(columnList.stream().map(info -> bindMarker()).toList());
// }
// Positional bind values: keyspace, table, then (optionally) the column names.
List<String> arr = new ArrayList<>();
arr.add(keyspace);
arr.add(table);

if (CollectionUtils.isNotEmpty(columnList)) {
// One bind marker per requested column; values appended to 'arr' below.
select = select.whereColumn("column_name").in(columnList.stream()
.map(info -> bindMarker())
.collect(Collectors.toSet()));

// NOTE(review): stale pre-diff line (missing semicolon) — superseded below.
statement = select.build(keyspace, table)
arr.addAll(columnList);
}

SimpleStatement statement = select.build(arr.toArray())
.setTimeout(Duration.ofSeconds(3));

ResultSet resultSet = session.execute(statement);

// Inject a synthetic "sortValue" per row and sort lexicographically on it.
// NOTE(review): lexicographic compare misorders multi-digit positions (e.g. "10" < "2");
// see makeSortValue. The synthetic key also remains in the returned rows.
List<Map<String, Object>> rows = convertRows(session, resultSet)
.stream()
.peek(row -> row.put("sortValue", makeSortValue(row)))
.sorted(Comparator.comparing(row -> String.valueOf(row.get("sortValue"))))
.toList();

return CqlSessionSelectResults.of(
// NOTE(review): stale pre-diff argument — the post-diff call passes 'rows' only.
convertRows(session, resultSet),
rows,
CassdioColumnDefinition.makes(resultSet.getColumnDefinitions())
);
}

/**
 * Returns the table's column names in sorted order (partition keys, clustering
 * keys, then regular columns), as produced by {@link #columnList}.
 *
 * @return mutable list of column names, in sort order
 */
public List<String> columnSortedList(String clusterId, String keyspace, String table) {
    List<String> names = new ArrayList<>();
    for (Map<String, Object> row : columnList(clusterId, keyspace, table).getRows()) {
        names.add(String.valueOf(row.get("column_name")));
    }
    return names;
}

// Builds the "<kindOrder>-<position>" sort key so that partition keys sort before
// clustering keys before regular columns, then by position within each kind.
//
// NOTE(review): the key is compared lexicographically by the caller, so multi-digit
// positions misorder (e.g. "0-10" sorts before "0-2"). Consider zero-padding the
// position — TODO confirm whether tables with >9 key columns are a real concern.
private String makeSortValue(Map<String, Object> row) {
ColumnKind columnKind = ColumnKind.findByCode(String.valueOf(row.get("kind")));
return String.format("%s-%s", columnKind.getOrder(), row.get("position"));
}

private SelectFrom getColumnTable(CqlSession session, String keyspace) {
if (ClusterUtils.isVirtualKeyspace(session.getContext(), keyspace)) {
return QueryBuilder
Expand Down

This file was deleted.

Loading

0 comments on commit eff9669

Please sign in to comment.