from pyspark.sql import SparkSession
from pyspark.sql.functions import col, lit, concat

# Create the SparkSession — the entry point for DataFrame/SQL operations.
spark = SparkSession.builder.appName("SparkSQLExample").getOrCreate()

# Build a small in-memory DataFrame (it could equally be read from CSV, JSON, etc.).
data = [("Alice", 586240, 177)]
columns = ["name", "lac", "ci"]
df = spark.createDataFrame(data, columns)

# Derive the CGI column as "3-" + str(lac * 256 + ci).
# NOTE(review): the 256 multiplier is kept from the original code — confirm it
# matches the intended CGI encoding for this data source.
df = df.withColumn(
    "cgi",
    concat(
        lit("3-"),
        (col("lac").cast("integer") * 256 + col("ci").cast("integer")).cast("string"),
    ),
)

# Show the result.
df.show()

# Stop the SparkSession to release cluster resources.
spark.stop()
# Example 2: the same transformation written in a more compact, single-expression style.
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, lit, concat

# Create the SparkSession and the sample DataFrame.
spark = SparkSession.builder.appName("SparkSQLExample").getOrCreate()
data = [("Alice", 586240, 177)]
columns = ["name", "lac", "ci"]
df = spark.createDataFrame(data, columns)

# Derive the CGI column as "3-" + str(lac * 256 + ci), in one expression.
# NOTE(review): the 256 multiplier is kept from the original code — confirm it
# matches the intended CGI encoding for this data source.
df = df.withColumn(
    "cgi",
    concat(lit("3-"), (col("lac").cast("integer") * 256 + col("ci").cast("integer")).cast("string")),
)

# Show the result.
df.show()

# Stop the SparkSession.
spark.stop()