Hello,
I managed to install it on the server, but I have been running my code through a job.sh script. I wanted to switch to a Jupyter notebook instead, but I get the error JAVA_HOME is not set, followed by this traceback:
RuntimeError Traceback (most recent call last)
Cell In[1], line 2
1 import hail as hl
----> 2 hl.init()
File &lt;decorator-gen-...&gt;:2, in init(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, backend, driver_cores, driver_memory, worker_cores, worker_memory, gcs_requester_pays_configuration, regions, gcs_bucket_allow_list, copy_spark_log_on_error)
File ~/.local/lib/python3.9/site-packages/hail/typecheck/check.py:585, in _make_dec.&lt;locals&gt;.wrapper(__original_func, *args, **kwargs)
582 @decorator
583 def wrapper(__original_func: Callable[..., T], *args, **kwargs) -> T:
584 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 585 return __original_func(*args, **kwargs)
File ~/.local/lib/python3.9/site-packages/hail/context.py:391, in init(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, backend, driver_cores, driver_memory, worker_cores, worker_memory, gcs_requester_pays_configuration, regions, gcs_bucket_allow_list, copy_spark_log_on_error)
371 return hail_event_loop().run_until_complete(
372 init_batch(
373 log=log,
(...)
388 )
389 )
390 if backend == 'spark':
--> 391 return init_spark(
392 sc=sc,
393 app_name=app_name,
394 master=master,
395 local=local,
396 min_block_size=min_block_size,
397 branching_factor=branching_factor,
398 spark_conf=spark_conf,
399 _optimizer_iterations=_optimizer_iterations,
400 log=log,
401 quiet=quiet,
402 append=append,
403 tmp_dir=tmp_dir,
404 local_tmpdir=local_tmpdir,
405 default_reference=default_reference,
406 global_seed=global_seed,
407 skip_logging_configuration=skip_logging_configuration,
408 gcs_requester_pays_configuration=gcs_requester_pays_configuration,
409 copy_log_on_error=copy_spark_log_on_error,
410 )
411 if backend == 'local':
412 return init_local(
413 log=log,
414 quiet=quiet,
(...)
420 gcs_requester_pays_configuration=gcs_requester_pays_configuration,
421 )
File &lt;decorator-gen-...&gt;:2, in init_spark(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, gcs_requester_pays_configuration, copy_log_on_error)
File ~/.local/lib/python3.9/site-packages/hail/typecheck/check.py:585, in _make_dec.&lt;locals&gt;.wrapper(__original_func, *args, **kwargs)
582 @decorator
583 def wrapper(__original_func: Callable[..., T], *args, **kwargs) -> T:
584 args, kwargs = check_all(__original_func, args, kwargs, checkers, is_method=is_method)
--> 585 return __original_func(*args, **kwargs)
File ~/.local/lib/python3.9/site-packages/hail/context.py:484, in init_spark(sc, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmp_dir, default_reference, idempotent, global_seed, spark_conf, skip_logging_configuration, local_tmpdir, _optimizer_iterations, gcs_requester_pays_configuration, copy_log_on_error)
475 app_name = app_name or 'Hail'
476 (
477 gcs_requester_pays_project,
478 gcs_requester_pays_buckets,
(...)
482 )
483 )
--> 484 backend = SparkBackend(
485 idempotent,
486 sc,
487 spark_conf,
488 app_name,
489 master,
490 local,
491 log,
492 quiet,
493 append,
494 min_block_size,
495 branching_factor,
496 tmpdir,
497 local_tmpdir,
498 skip_logging_configuration,
499 optimizer_iterations,
500 gcs_requester_pays_project=gcs_requester_pays_project,
501 gcs_requester_pays_buckets=gcs_requester_pays_buckets,
502 copy_log_on_error=copy_log_on_error,
503 )
504 if not backend.fs.exists(tmpdir):
505 backend.fs.mkdir(tmpdir)
File ~/.local/lib/python3.9/site-packages/hail/backend/spark_backend.py:91, in SparkBackend.__init__(self, idempotent, sc, spark_conf, app_name, master, local, log, quiet, append, min_block_size, branching_factor, tmpdir, local_tmpdir, skip_logging_configuration, optimizer_iterations, gcs_requester_pays_project, gcs_requester_pays_buckets, copy_log_on_error)
86 append_to_comma_separated_list(
87 conf, 'spark.executor.extraClassPath', './hail-all-spark.jar', *extra_classpath
88 )
90 if sc is None:
---> 91 pyspark.SparkContext._ensure_initialized(conf=conf)
92 elif not quiet:
93 sys.stderr.write(
94 'pip-installed Hail requires additional configuration options in Spark referring\n'
95 ' to the path to the Hail Python module directory HAIL_DIR,\n'
(...)
99 ' spark.executor.extraClassPath=./hail-all-spark.jar'
100 )
File ~/.local/lib/python3.9/site-packages/pyspark/context.py:417, in SparkContext._ensure_initialized(cls, instance, gateway, conf)
415 with SparkContext._lock:
416 if not SparkContext._gateway:
--> 417 SparkContext._gateway = gateway or launch_gateway(conf)
418 SparkContext._jvm = SparkContext._gateway.jvm
420 if instance:
File ~/.local/lib/python3.9/site-packages/pyspark/java_gateway.py:106, in launch_gateway(conf, popen_kwargs)
103 time.sleep(0.1)
105 if not os.path.isfile(conn_info_file):
--> 106 raise RuntimeError("Java gateway process exited before sending its port number")
108 with open(conn_info_file, "rb") as info:
109 gateway_port = read_int(info)
RuntimeError: Java gateway process exited before sending its port number
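If I understand the traceback correctly, pyspark could not start a Java process, which would explain the JAVA_HOME message. Once I locate a JDK, my plan is to point JAVA_HOME at it before calling hl.init(), along these lines (a minimal sketch only; the path below is a placeholder, not a real location on the server):

```python
import os

# Placeholder path: to be replaced with the actual JDK location on the server
# (as far as I know, Hail 0.2 expects Java 8 or 11)
os.environ["JAVA_HOME"] = "/path/to/jdk"

# Put the matching `java` binary first on PATH so pyspark finds it
os.environ["PATH"] = os.path.join(os.environ["JAVA_HOME"], "bin") + os.pathsep + os.environ["PATH"]

import hail as hl
hl.init()
```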
The problem is that I have been trying to find Java on the server, but without success.
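For example, here is the kind of check I have been running from inside the notebook kernel (a minimal sketch, assuming nothing special about the environment):

```python
import os
import shutil

# Prints None if JAVA_HOME is not set in the kernel's environment
print("JAVA_HOME:", os.environ.get("JAVA_HOME"))

# Prints None if no `java` executable is on the kernel's PATH
print("java on PATH:", shutil.which("java"))
```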
Thank you,
Best regards,
NadineY