Commit

other: merge main
hjxilinx committed Dec 17, 2024
2 parents 779c1bd + 7db6f86 commit d2146a2
Showing 708 changed files with 37,856 additions and 9,532 deletions.
10 changes: 10 additions & 0 deletions .gitignore
@@ -99,6 +99,7 @@ tests/examples/JDBC/JDBCDemo/.classpath
tests/examples/JDBC/JDBCDemo/.project
tests/examples/JDBC/JDBCDemo/.settings/
source/libs/parser/inc/sql.*
source/os/src/timezone/
tests/script/tmqResult.txt
tests/system-test/case_to_run.txt
tests/develop-test/case_to_run.txt
@@ -162,3 +163,12 @@ geos_c.h
source/libs/parser/src/sql.c
include/common/ttokenauto.h
!packaging/smokeTest/pytest_require.txt
tdengine-test-dir/
localtime.c
private.h
strftime.c
tzdir.h
tzfile.h
coverage.info
taos
taosd
1 change: 1 addition & 0 deletions Jenkinsfile2
@@ -402,6 +402,7 @@ pipeline {
WKDIR = '/var/lib/jenkins/workspace'
WK = '/var/lib/jenkins/workspace/TDinternal'
WKC = '/var/lib/jenkins/workspace/TDinternal/community'
WKPY = '/var/lib/jenkins/workspace/taos-connector-python'
DOC_WKC = '/root/doc_ci_work'
td_repo = 'TDengine'
zh_doc_repo = 'docs.taosdata.com'
9 changes: 6 additions & 3 deletions cmake/cmake.define
@@ -97,10 +97,13 @@ ELSE()
SET(TD_TAOS_TOOLS TRUE)
ENDIF()

SET(TAOS_LIB taos)
SET(TAOS_LIB_STATIC taos_static)

IF(${TD_WINDOWS})
SET(TAOS_LIB taos_static)
SET(TAOS_LIB_PLATFORM_SPEC taos_static)
ELSE()
SET(TAOS_LIB taos)
SET(TAOS_LIB_PLATFORM_SPEC taos)
ENDIF()

# build TSZ by default
@@ -128,7 +131,7 @@ IF(TD_WINDOWS)
SET(COMMON_FLAGS "/w /D_WIN32 /DWIN32 /Zi /MTd")
ENDIF()

SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /MANIFEST:NO")
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /MANIFEST:NO /FORCE:MULTIPLE")

# IF (MSVC AND (MSVC_VERSION GREATER_EQUAL 1900))
# SET(COMMON_FLAGS "${COMMON_FLAGS} /Wv:18")
2 changes: 1 addition & 1 deletion cmake/libuv_CMakeLists.txt.in
@@ -2,7 +2,7 @@
# libuv
ExternalProject_Add(libuv
GIT_REPOSITORY https://github.com/libuv/libuv.git
GIT_TAG v1.48.0
GIT_TAG v1.49.2
SOURCE_DIR "${TD_CONTRIB_DIR}/libuv"
BINARY_DIR "${TD_CONTRIB_DIR}/libuv"
CONFIGURE_COMMAND ""
15 changes: 15 additions & 0 deletions cmake/tz_CMakeLists.txt.in
@@ -0,0 +1,15 @@
# timezone
ExternalProject_Add(tz
GIT_REPOSITORY https://github.com/eggert/tz.git
GIT_TAG main
SOURCE_DIR "${TD_CONTRIB_DIR}/tz"
BINARY_DIR ""
CONFIGURE_COMMAND ""
#BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
GIT_PROGRESS true
BUILD_COMMAND ""
)

33 changes: 33 additions & 0 deletions contrib/CMakeLists.txt
@@ -106,6 +106,10 @@ cat("${TD_SUPPORT_DIR}/zlib_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cJson
cat("${TD_SUPPORT_DIR}/cjson_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})

if(NOT ${TD_WINDOWS})
cat("${TD_SUPPORT_DIR}/tz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
endif(NOT ${TD_WINDOWS})

# xz
# cat("${TD_SUPPORT_DIR}/xz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})

@@ -651,6 +655,35 @@ if(${TD_LINUX} AND ${BUILD_WITH_S3})
add_subdirectory(azure-cmake EXCLUDE_FROM_ALL)
endif()

IF(TD_LINUX)
SET(TZ_OUTPUT_PATH /usr/share/zoneinfo)
ELSEIF(TD_DARWIN)
SET(TZ_OUTPUT_PATH /var/db/timezone/zoneinfo)
ENDIF()


if(NOT ${TD_WINDOWS})
MESSAGE(STATUS "timezone file path: " ${TZ_OUTPUT_PATH})

execute_process(
COMMAND make TZDIR=${TZ_OUTPUT_PATH}/ clean tzdir.h
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/tz"
)

set(TZ_SRC_DIR "${TD_SOURCE_DIR}/source/os/src/timezone")
file(REMOVE_RECURSE ${TZ_SRC_DIR})
file(MAKE_DIRECTORY ${TZ_SRC_DIR})
file(COPY ${TD_CONTRIB_DIR}/tz/private.h ${TD_CONTRIB_DIR}/tz/tzdir.h ${TD_CONTRIB_DIR}/tz/tzfile.h
${TD_CONTRIB_DIR}/tz/localtime.c ${TD_CONTRIB_DIR}/tz/strftime.c
DESTINATION ${TZ_SRC_DIR})
endif(NOT ${TD_WINDOWS})

#if(NOT ${TD_WINDOWS})
# execute_process(
# COMMAND make CFLAGS+=-fPIC CFLAGS+=-g TZDIR=${TZ_OUTPUT_PATH} clean libtz.a
# WORKING_DIRECTORY "${TD_CONTRIB_DIR}/tz"
# )
#endif(NOT ${TD_WINDOWS})
# ================================================================================================
# Build test
# ================================================================================================
5 changes: 4 additions & 1 deletion contrib/test/CMakeLists.txt
@@ -28,6 +28,9 @@ if(${BUILD_WITH_TRAFT})
# add_subdirectory(traft)
endif(${BUILD_WITH_TRAFT})

add_subdirectory(azure)
if(${BUILD_S3})
add_subdirectory(azure)
endif()

add_subdirectory(tdev)
add_subdirectory(lz4)
Binary file modified deps/arm/dm_static/libdmodule.a
Binary file modified deps/darwin/arm/dm_static/libdmodule.a
Binary file modified deps/darwin/x64/dm_static/libdmodule.a
Binary file modified deps/mips/dm_static/libdmodule.a
Binary file modified deps/win/x64/dm_static/dmodule.lib
Binary file modified deps/x86/dm_static/libdmodule.a
4 changes: 2 additions & 2 deletions docs/en/06-advanced/05-data-in/07-mqtt.md
@@ -112,14 +112,14 @@ Fill in the example data from the MQTT message body in **Message Body**.

JSON data supports JSONObject or JSONArray, and the json parser can parse the following data:

``` json
```json
{"id": 1, "message": "hello-word"}
{"id": 2, "message": "hello-word"}
```

or

``` json
```json
[{"id": 1, "message": "hello-word"},{"id": 2, "message": "hello-word"}]
```

6 changes: 3 additions & 3 deletions docs/en/06-advanced/05-data-in/08-kafka.md
@@ -109,7 +109,7 @@ In addition, the [Kerberos](https://web.mit.edu/kerberos/) authentication servic

After configuration, you can use the [kcat](https://github.com/edenhill/kcat) tool to verify Kafka topic consumption:

```bash
```shell
kcat <topic> \
-b <kafka-server:port> \
-G kcat \
@@ -171,14 +171,14 @@ Enter sample data from the Kafka message body in **Message Body**.

JSON data supports JSONObject or JSONArray, and the following data can be parsed using a JSON parser:

``` json
```json
{"id": 1, "message": "hello-word"}
{"id": 2, "message": "hello-word"}
```

or

``` json
```json
[{"id": 1, "message": "hello-word"},{"id": 2, "message": "hello-word"}]
```

12 changes: 6 additions & 6 deletions docs/en/06-advanced/05-data-in/index.md
@@ -83,15 +83,15 @@ Parsing is the process of parsing unstructured strings into structured data. The

JSON parsing supports JSONObject or JSONArray. The following JSON sample data can automatically parse fields: `groupid`, `voltage`, `current`, `ts`, `inuse`, `location`.

``` json
```json
{"groupid": 170001, "voltage": "221V", "current": 12.3, "ts": "2023-12-18T22:12:00", "inuse": true, "location": "beijing.chaoyang.datun"}
{"groupid": 170001, "voltage": "220V", "current": 12.2, "ts": "2023-12-18T22:12:02", "inuse": true, "location": "beijing.chaoyang.datun"}
{"groupid": 170001, "voltage": "216V", "current": 12.5, "ts": "2023-12-18T22:12:04", "inuse": false, "location": "beijing.chaoyang.datun"}
```

Or

``` json
```json
[{"groupid": 170001, "voltage": "221V", "current": 12.3, "ts": "2023-12-18T22:12:00", "inuse": true, "location": "beijing.chaoyang.datun"},
{"groupid": 170001, "voltage": "220V", "current": 12.2, "ts": "2023-12-18T22:12:02", "inuse": true, "location": "beijing.chaoyang.datun"},
{"groupid": 170001, "voltage": "216V", "current": 12.5, "ts": "2023-12-18T22:12:04", "inuse": false, "location": "beijing.chaoyang.datun"}]
@@ -101,7 +101,7 @@ Subsequent examples will only explain using JSONObject.

The following nested JSON data can automatically parse fields `groupid`, `data_voltage`, `data_current`, `ts`, `inuse`, `location_0_province`, `location_0_city`, `location_0_datun`, and you can also choose which fields to parse and set aliases for the parsed fields.

``` json
```json
{"groupid": 170001, "data": { "voltage": "221V", "current": 12.3 }, "ts": "2023-12-18T22:12:00", "inuse": true, "location": [{"province": "beijing", "city":"chaoyang", "street": "datun"}]}
```

@@ -114,7 +114,7 @@ The following nested JSON data can automatically parse fields `groupid`, `data_v

You can use **named capture groups** in regular expressions to extract multiple fields from any string (text) field. As shown in the figure, extract fields such as access IP, timestamp, and accessed URL from nginx logs.

``` re
```regex
(?<ip>\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b)\s-\s-\s\[(?<ts>\d{2}/\w{3}/\d{4}:\d{2}:\d{2}:\d{2}\s\+\d{4})\]\s"(?<method>[A-Z]+)\s(?<url>[^\s"]+).*(?<status>\d{3})\s(?<length>\d+)
```

@@ -133,7 +133,7 @@ Custom rhai syntax scripts for parsing input data (refer to `https://rhai.rs/boo

For example, for data reporting three-phase voltage values, which are entered into three subtables respectively, such data needs to be parsed

``` json
```json
{
"ts": "2024-06-27 18:00:00",
"voltage": "220.1,220.3,221.1",
@@ -164,7 +164,7 @@ The final parsing result is shown below:

The parsed data may still not meet the data requirements of the target table. For example, the original data collected by a smart meter is as follows (in json format):

``` json
```json
{"groupid": 170001, "voltage": "221V", "current": 12.3, "ts": "2023-12-18T22:12:00", "inuse": true, "location": "beijing.chaoyang.datun"}
{"groupid": 170001, "voltage": "220V", "current": 12.2, "ts": "2023-12-18T22:12:02", "inuse": true, "location": "beijing.chaoyang.datun"}
{"groupid": 170001, "voltage": "216V", "current": 12.5, "ts": "2023-12-18T22:12:04", "inuse": false, "location": "beijing.chaoyang.datun"}
26 changes: 13 additions & 13 deletions docs/en/07-develop/01-connect.md
@@ -7,19 +7,19 @@ import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
import Image from '@theme/IdealImage';
import imgConnect from '../assets/connecting-to-tdengine-01.png';
import ConnJava from "./_connect_java.mdx";
import ConnGo from "./_connect_go.mdx";
import ConnRust from "./_connect_rust.mdx";
import ConnNode from "./_connect_node.mdx";
import ConnPythonNative from "./_connect_python.mdx";
import ConnCSNative from "./_connect_cs.mdx";
import ConnC from "./_connect_c.mdx";
import InstallOnLinux from "../14-reference/05-connector/_linux_install.mdx";
import InstallOnWindows from "../14-reference/05-connector/_windows_install.mdx";
import InstallOnMacOS from "../14-reference/05-connector/_macos_install.mdx";
import VerifyLinux from "../14-reference/05-connector/_verify_linux.mdx";
import VerifyMacOS from "../14-reference/05-connector/_verify_macos.mdx";
import VerifyWindows from "../14-reference/05-connector/_verify_windows.mdx";
import ConnJava from "../assets/resources/_connect_java.mdx";
import ConnGo from "../assets/resources/_connect_go.mdx";
import ConnRust from "../assets/resources/_connect_rust.mdx";
import ConnNode from "../assets/resources/_connect_node.mdx";
import ConnPythonNative from "../assets/resources/_connect_python.mdx";
import ConnCSNative from "../assets/resources/_connect_cs.mdx";
import ConnC from "../assets/resources/_connect_c.mdx";
import InstallOnLinux from "../assets/resources/_linux_install.mdx";
import InstallOnWindows from "../assets/resources/_windows_install.mdx";
import InstallOnMacOS from "../assets/resources/_macos_install.mdx";
import VerifyLinux from "../assets/resources/_verify_linux.mdx";
import VerifyMacOS from "../assets/resources/_verify_macos.mdx";
import VerifyWindows from "../assets/resources/_verify_windows.mdx";

TDengine provides a rich set of application development interfaces. To facilitate users in quickly developing their applications, TDengine supports connectors for multiple programming languages. The official connectors include support for C/C++, Java, Python, Go, Node.js, C#, Rust, Lua (community contribution), and PHP (community contribution). These connectors support connecting to the TDengine cluster using the native interface (taosc) and REST interface (not supported in some languages yet). Community developers have also contributed several unofficial connectors, such as ADO.NET connector, Lua connector, and PHP connector. Additionally, TDengine can directly call the REST API provided by taosadapter for data writing and querying operations.

10 changes: 5 additions & 5 deletions docs/en/07-develop/02-sql.md
@@ -83,14 +83,14 @@ Next, create a supertable (STABLE) named `meters`, whose table structure include

Create Database

```bash
```shell
curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql' \
--data 'CREATE DATABASE IF NOT EXISTS power'
```

Create Table, specify the database as `power` in the URL

```bash
```shell
curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql/power' \
--data 'CREATE STABLE IF NOT EXISTS meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))'
```
@@ -167,7 +167,7 @@ NOW is an internal system function, defaulting to the current time of the client

Write data

```bash
```shell
curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql' \
--data 'INSERT INTO power.d1001 USING power.meters TAGS(2,'\''California.SanFrancisco'\'') VALUES (NOW + 1a, 10.30000, 219, 0.31000) (NOW + 2a, 12.60000, 218, 0.33000) (NOW + 3a, 12.30000, 221, 0.31000) power.d1002 USING power.meters TAGS(3, '\''California.SanFrancisco'\'') VALUES (NOW + 1a, 10.30000, 218, 0.25000)'
```
@@ -247,7 +247,7 @@ Rust connector also supports using **serde** for deserializing to get structured

Query Data

```bash
```shell
curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql' \
--data 'SELECT ts, current, location FROM power.meters limit 100'
```
@@ -329,7 +329,7 @@ Below are code examples of setting reqId to execute SQL in various language conn

Query data, specify reqId as 3

```bash
```shell
curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql?req_id=3' \
--data 'SELECT ts, current, location FROM power.meters limit 1'
```
18 changes: 9 additions & 9 deletions docs/en/07-develop/09-udf.md
@@ -273,19 +273,19 @@ To better operate the above data structures, some convenience functions are prov

Create table:

```bash
```shell
create table battery(ts timestamp, vol1 float, vol2 float, vol3 float, deviceId varchar(16));
```

Create custom function:

```bash
```shell
create aggregate function max_vol as '/root/udf/libmaxvol.so' outputtype binary(64) bufsize 10240 language 'C';
```

Use custom function:

```bash
```shell
select max_vol(vol1, vol2, vol3, deviceid) from battery;
```
@@ -334,7 +334,7 @@ When developing UDFs in Python, you need to implement the specified interface fu
The interface for scalar functions is as follows.
```Python
```python
def process(input: datablock) -> tuple[output_type]:
```
@@ -347,7 +347,7 @@ The main parameters are as follows:
The interface for aggregate functions is as follows.
```Python
```python
def start() -> bytes:
def reduce(inputs: datablock, buf: bytes) -> bytes
def finish(buf: bytes) -> output_type:
@@ -365,7 +365,7 @@ Finally, when all row data blocks have been processed, the finish function is ca
The interfaces for initialization and destruction are as follows.
```Python
```python
def init()
def destroy()
```
@@ -381,7 +381,7 @@ Parameter description:
The template for developing scalar functions in Python is as follows.
```Python
```python
def init():
# initialization
def destroy():
@@ -393,7 +393,7 @@ def process(input: datablock) -> tuple[output_type]:
The template for developing aggregate functions in Python is as follows.
```Python
```python
def init():
#initialization
def destroy():
@@ -828,7 +828,7 @@ Through this example, we learned how to define aggregate functions and print cus
<details>
<summary>pybitand.py</summary>
```Python
```python
{{#include tests/script/sh/pybitand.py}}
```