A tip for anyone who decides to use PyCharm on Windows: watch out for directory paths that contain spaces (e.g. 'C:\Program Files'), since they can break SPARK_HOME and HADOOP_HOME resolution. The setup below worked for me:
import os
import findspark

# Both SPARK_HOME and JAVA_HOME below live under 'Program Files', which
# contains a space; if Spark fails to start, try a space-free install
# location instead (e.g. C:\Spark, C:\Java).
os.environ['SPARK_HOME'] = 'C:\\Program Files\\Spark\\spark-3.3.2-bin-hadoop3\\spark-3.3.2-bin-hadoop3'
os.environ['JAVA_HOME'] = 'C:\\Program Files\\Java\\jdk-18.0.2.1'
# HADOOP_HOME must point to a directory, not to winutils.exe itself;
# place winutils.exe inside %HADOOP_HOME%\bin (here, C:\hadoop\bin\winutils.exe).
os.environ['HADOOP_HOME'] = 'C:\\hadoop'

# findspark.init() must run BEFORE any pyspark import, so it can add the
# SPARK_HOME libraries to sys.path.
findspark.init()

from pyspark.sql import SparkSession

spark = SparkSession.builder.master('local[*]').appName("Iniciando_spark").getOrCreate()
sc = spark.sparkContext  # no need for 'from pyspark.shell import sc', which starts its own context
sc.setLogLevel('WARN')
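
Once the session is up, a quick sanity check confirms that PySpark can reach the JVM (a minimal sketch; the DataFrame contents here are just illustrative):

print(spark.version)  # should print 3.3.2 for this install

# Build and display a tiny DataFrame to verify the local cluster works.
df = spark.createDataFrame([(1, 'a'), (2, 'b')], ['id', 'letter'])
df.show()

If df.show() prints the two rows without Java or winutils errors, the environment variables are set correctly.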