MVA Script

import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.cluster import KMeans

# Step 1, which defines data_imp (the imputed feature matrix), labels, and
# variance_threshold, precedes this excerpt and is not shown.

# Step 2: Scale features
scaler = StandardScaler()
data_scaled = scaler.fit_transform(data_imp)
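Scaling before PCA and k-means is not optional here: both are variance- or distance-driven, so a feature measured in large units would dominate every component. A minimal sketch verifying what StandardScaler produces, on made-up toy data (the rng values are illustrative, not part of the script):

import numpy as np
from sklearn.preprocessing import StandardScaler

rng = np.random.default_rng(0)
X_toy = rng.normal(loc=[0, 100], scale=[1, 50], size=(200, 2))  # two features on wildly different scales
X_std = StandardScaler().fit_transform(X_toy)
print(X_std.mean(axis=0).round(6), X_std.std(axis=0).round(6))  # ~[0, 0] and [1, 1]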

# Step 3: PCA
pca = PCA(n_components=min(data_scaled.shape[1], 10))
pca_scores = pca.fit_transform(data_scaled)
cum_var = np.cumsum(pca.explained_variance_ratio_)
n_comp = np.argmax(cum_var >= variance_threshold) + 1
print(f"Optimal PCA components: {n_comp} (explained {cum_var[n_comp-1]:.2%})")
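The argmax trick above returns the first component index whose cumulative ratio crosses variance_threshold. One caveat: if the capped component count never reaches the threshold, argmax of an all-False mask returns 0 and n_comp silently becomes 1. A hedged variant with an explicit fallback (the cum_var values here are invented for illustration):

import numpy as np

cum_var = np.array([0.55, 0.78, 0.91, 0.97, 1.00])  # toy cumulative ratios
variance_threshold = 0.90
mask = cum_var >= variance_threshold
n_comp = int(np.argmax(mask)) + 1 if mask.any() else len(cum_var)  # fall back to all components
print(n_comp)  # 3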

# Step 4: Plot scree
plt.figure(figsize=(8, 4))
plt.bar(range(1, len(pca.explained_variance_ratio_) + 1), pca.explained_variance_ratio_)
plt.step(range(1, len(cum_var) + 1), cum_var, where='mid', color='red')
plt.title('Scree Plot with Cumulative Variance')
plt.xlabel('Principal Component')
plt.ylabel('Variance Ratio')
plt.savefig('scree_plot.png')
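Because the script writes several figures without ever calling plt.show(), each plt.figure() stays open for the life of the process. A small sketch of the same scree plot in matplotlib's object-oriented style, closed after saving (the toy ratios are made up; the Agg backend line is only needed on headless machines):

import matplotlib
matplotlib.use('Agg')  # headless backend; skip if a display is available
import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(8, 4))
ax.bar(range(1, 4), [0.5, 0.3, 0.2])                    # toy variance ratios
ax.step(range(1, 4), [0.5, 0.8, 1.0], where='mid', color='red')
ax.set(title='Scree Plot with Cumulative Variance',
       xlabel='Principal Component', ylabel='Variance Ratio')
fig.savefig('scree_plot_demo.png')
plt.close(fig)  # free the figure so long batch runs don't accumulate open plots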

# Step 5: LDA (if labels exist)
if labels is not None:
    lda = LDA(n_components=min(2, len(np.unique(labels)) - 1))
    lda_scores = lda.fit_transform(data_scaled, labels)
    print("LDA applied. Reduced shape:", lda_scores.shape)
    # LDA scatter plot; with only two classes LDA yields a single axis,
    # so fall back to y = 0 instead of indexing a missing second column
    plt.figure()
    for lab in np.unique(labels):
        subset = lda_scores[labels == lab]
        ys = subset[:, 1] if lda_scores.shape[1] > 1 else np.zeros(len(subset))
        plt.scatter(subset[:, 0], ys, label=f'Class {lab}')
    plt.legend()
    plt.title('LDA Projection')
    plt.savefig('lda_plot.png')
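The cap min(2, len(np.unique(labels)) - 1) reflects a hard constraint: LDA can produce at most n_classes - 1 discriminant axes, so a binary problem gets exactly one. A quick self-contained check on invented data:

import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA

rng = np.random.default_rng(1)
X_toy = rng.normal(size=(60, 4))    # toy features
y_toy = np.repeat([0, 1], 30)       # two classes -> one discriminant axis
scores = LDA(n_components=1).fit_transform(X_toy, y_toy)
print(scores.shape)                 # (60, 1): a 2-D scatter needs the fallback above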

# Step 6: Unsupervised clustering (if no labels)
clusters = None  # stays None when labels were supplied and Step 5 ran instead
if labels is None:
    # Elbow method
    inertias = []
    K_range = range(2, min(10, data_scaled.shape[0]))
    for k in K_range:
        km = KMeans(n_clusters=k, random_state=42, n_init=10)
        km.fit(data_scaled)
        inertias.append(km.inertia_)
    plt.figure()
    plt.plot(K_range, inertias, 'bo-')
    plt.xlabel('k')
    plt.ylabel('Inertia')
    plt.title('Elbow for k-means')
    plt.savefig('elbow.png')
    best_k = K_range[np.argmin(np.diff(inertias))]  # simple heuristic: k before the largest inertia drop
    km_final = KMeans(n_clusters=best_k, random_state=42, n_init=10)
    clusters = km_final.fit_predict(data_scaled)
    print(f"Optimal clusters: {best_k}")
return pca_scores, clusters  # closes the enclosing function; its definition and Step 1 are not shown
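The diff-based "simple heuristic" picks the k immediately before the largest single inertia drop; since inertia falls fastest at small k, this tends to land on the first candidate. A common, more robust alternative is to choose k by silhouette score, sketched below on made-up blob data (make_blobs and the loop bounds are illustrative, not part of the original script):

from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.metrics import silhouette_score

X_toy, _ = make_blobs(n_samples=300, centers=4, random_state=42)  # toy data with 4 true clusters
sil = {}
for k in range(2, 10):
    labels_k = KMeans(n_clusters=k, random_state=42, n_init=10).fit_predict(X_toy)
    sil[k] = silhouette_score(X_toy, labels_k)
best_k = max(sil, key=sil.get)  # highest mean silhouette wins
print(best_k, round(sil[best_k], 3))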