LLVM: lib/Analysis/models/interactive_host.py

"""Utility for testing InteractiveModelRunner.

Use it from pass-specific tests by providing a main .py which calls this library's
`run_interactive` with an appropriate callback to provide advice (see the driver
sketch at the end of this file).
"""

import ctypes
import io
import math
import os
import subprocess
from typing import Callable, List, Union

import log_reader


def send(f: io.BufferedWriter, value: Union[int, float], spec: log_reader.TensorSpec):
    """Send the `value` - currently just a scalar - formatted as per `spec`."""

    # Only scalar int64 advice is supported for now.
    assert spec.element_type == ctypes.c_int64
    to_send = ctypes.c_int64(int(value))
    assert f.write(bytes(to_send)) == ctypes.sizeof(spec.element_type) * math.prod(
        spec.shape
    )
    f.flush()
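
# Illustration of the wire format (an assumption for this sketch: a little-endian
# host, which is what bytes(ctypes.c_int64(...)) yields there). Sending the scalar
# advice 5 for an int64 spec of shape [1] writes these 8 bytes to the pipe:
#
#   bytes(ctypes.c_int64(5)) == (5).to_bytes(8, byteorder="little", signed=True)
#   # i.e. b'\x05\x00\x00\x00\x00\x00\x00\x00'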


def run_interactive(
    temp_rootname: str,
    make_response: Callable[[List[log_reader.TensorValue]], Union[int, float]],
    process_and_args: List[str],
):
    """Host the compiler.
    Args:
      temp_rootname: the base file name from which to construct the 2 pipes for
        communicating with the compiler.
      make_response: a function that, given the current tensor values, provides a
        response.
      process_and_args: the full command line for the compiler. It is assumed it
        contains a flag pointing to `temp_rootname`, so that the InteractiveModelRunner
        would attempt communication on the same pair of pipes as this function opens.

    This function sets up the communication with the compiler - via 2 named pipes,
    `temp_rootname`.in and `temp_rootname`.out - prints out the received features,
    and sends back to the compiler the advice (which it gets from `make_response`).
    It's used for testing, and also to showcase how to set up communication in an
    interactive ML ("gym") environment.
    """
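    # For example, with temp_rootname "/tmp/interactive" (an illustrative path),
    # this host writes advice into /tmp/interactive.in - the compiler's input -
    # and reads observations from /tmp/interactive.out - the compiler's output.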

    to_compiler = temp_rootname + ".in"
    from_compiler = temp_rootname + ".out"
    try:
        os.mkfifo(to_compiler, 0o666)
        os.mkfifo(from_compiler, 0o666)
        compiler_proc = subprocess.Popen(
            process_and_args, stderr=subprocess.PIPE, stdout=subprocess.DEVNULL
        )
        with io.BufferedWriter(io.FileIO(to_compiler, "wb")) as tc:
            with io.BufferedReader(io.FileIO(from_compiler, "rb")) as fc:
                # The compiler starts by sending a header with the specs of the
                # feature tensors and of the expected advice.
                tensor_specs, _, advice_spec = log_reader.read_header(fc)
                context = None
                while compiler_proc.poll() is None:
                    next_event = fc.readline()
                    if not next_event:
                        break
                    (
                        last_context,
                        observation_id,
                        features,
                        _,
                    ) = log_reader.read_one_observation(
                        context, next_event, fc, tensor_specs, None
                    )
                    if last_context != context:
                        print(f"context: {last_context}")
                    context = last_context
                    print(f"observation: {observation_id}")
                    tensor_values = []
                    for fv in features:
                        log_reader.pretty_print_tensor_value(fv)
                        tensor_values.append(fv)
                    send(tc, make_response(tensor_values), advice_spec)
        _, err = compiler_proc.communicate()
        print(err.decode("utf-8"))
        compiler_proc.wait()

    finally:
        os.unlink(to_compiler)
        os.unlink(from_compiler)
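

# A minimal driver sketch, for illustration only; the wrapper below is hypothetical
# and not shipped with this module. A pass-specific test's main .py could call
# `run_interactive` with a callback that, say, always replies with the advice 1:
#
#   import sys
#   import interactive_host
#
#   def main(argv):
#       # argv[0]: the pipe root name; argv[1:]: the compiler command line, which
#       # must contain the flag pointing the InteractiveModelRunner at that same
#       # pipe root.
#       interactive_host.run_interactive(argv[0], lambda tensor_values: 1, argv[1:])
#
#   if __name__ == "__main__":
#       main(sys.argv[1:])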